1 /*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #ifndef ART_COMPILER_OPTIMIZING_NODES_H_
18 #define ART_COMPILER_OPTIMIZING_NODES_H_
19
20 #include <algorithm>
21 #include <array>
22 #include <type_traits>
23
24 #include "art_method.h"
25 #include "base/arena_allocator.h"
26 #include "base/arena_bit_vector.h"
27 #include "base/arena_containers.h"
28 #include "base/arena_object.h"
29 #include "base/array_ref.h"
30 #include "base/intrusive_forward_list.h"
31 #include "base/iteration_range.h"
32 #include "base/macros.h"
33 #include "base/mutex.h"
34 #include "base/quasi_atomic.h"
35 #include "base/stl_util.h"
36 #include "base/transform_array_ref.h"
37 #include "block_namer.h"
38 #include "class_root.h"
39 #include "compilation_kind.h"
40 #include "data_type.h"
41 #include "deoptimization_kind.h"
42 #include "dex/dex_file.h"
43 #include "dex/dex_file_types.h"
44 #include "dex/invoke_type.h"
45 #include "dex/method_reference.h"
46 #include "entrypoints/quick/quick_entrypoints_enum.h"
47 #include "handle.h"
48 #include "handle_scope.h"
49 #include "intrinsics_enum.h"
50 #include "locations.h"
51 #include "mirror/class.h"
52 #include "mirror/method_type.h"
53 #include "offsets.h"
54
55 namespace art HIDDEN {
56
57 class ArenaStack;
58 class CodeGenerator;
59 class GraphChecker;
60 class HBasicBlock;
61 class HConstructorFence;
62 class HCurrentMethod;
63 class HDoubleConstant;
64 class HEnvironment;
65 class HFloatConstant;
66 class HGraphBuilder;
67 class HGraphVisitor;
68 class HInstruction;
69 class HIntConstant;
70 class HInvoke;
71 class HLongConstant;
72 class HNullConstant;
73 class HParameterValue;
74 class HPhi;
75 class HSuspendCheck;
76 class HTryBoundary;
77 class FieldInfo;
78 class LiveInterval;
79 class LocationSummary;
80 class ProfilingInfo;
81 class SlowPathCode;
82 class SsaBuilder;
83
84 namespace mirror {
85 class DexCache;
86 } // namespace mirror
87
88 static const int kDefaultNumberOfBlocks = 8;
89 static const int kDefaultNumberOfSuccessors = 2;
90 static const int kDefaultNumberOfPredecessors = 2;
91 static const int kDefaultNumberOfExceptionalPredecessors = 0;
92 static const int kDefaultNumberOfDominatedBlocks = 1;
93 static const int kDefaultNumberOfBackEdges = 1;
94
95 // The maximum (meaningful) distance (31) that can be used in an integer shift/rotate operation.
96 static constexpr int32_t kMaxIntShiftDistance = 0x1f;
97 // The maximum (meaningful) distance (63) that can be used in a long shift/rotate operation.
98 static constexpr int32_t kMaxLongShiftDistance = 0x3f;
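// Note: shift/rotate distances wider than these get masked down (e.g. an int shift by 35
// behaves like a shift by 35 & 0x1f = 3), matching Java/dex shift semantics.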
99
100 static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1);
101 static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1);
102
103 static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1);
104
105 static constexpr uint32_t kNoDexPc = -1;
106
107 inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) {
108 // For the purposes of the compiler, the dex files must actually be the same object
109 // if we want to safely treat them as the same. This is especially important for JIT
110 // as custom class loaders can open the same underlying file (or memory) multiple
111 // times and provide different class resolution but no two class loaders should ever
112 // use the same DexFile object - doing so is an unsupported hack that can lead to
113 // all sorts of weird failures.
114 return &lhs == &rhs;
115 }
116
117 enum IfCondition {
118 // All types.
119 kCondEQ, // ==
120 kCondNE, // !=
121 // Signed integers and floating-point numbers.
122 kCondLT, // <
123 kCondLE, // <=
124 kCondGT, // >
125 kCondGE, // >=
126 // Unsigned integers.
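// (The B/BE/A/AE names follow the x86-style "below"/"above" naming for unsigned comparisons.)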
127 kCondB, // <
128 kCondBE, // <=
129 kCondA, // >
130 kCondAE, // >=
131 // First and last aliases.
132 kCondFirst = kCondEQ,
133 kCondLast = kCondAE,
134 };
135
136 enum GraphAnalysisResult {
137 kAnalysisSkipped,
138 kAnalysisInvalidBytecode,
139 kAnalysisFailThrowCatchLoop,
140 kAnalysisFailAmbiguousArrayOp,
141 kAnalysisFailIrreducibleLoopAndStringInit,
142 kAnalysisFailPhiEquivalentInOsr,
143 kAnalysisSuccess,
144 };
145
146 std::ostream& operator<<(std::ostream& os, GraphAnalysisResult ga);
147
148 template <typename T>
149 static inline typename std::make_unsigned<T>::type MakeUnsigned(T x) {
150 return static_cast<typename std::make_unsigned<T>::type>(x);
151 }
152
153 class HInstructionList : public ValueObject {
154 public:
155 HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {}
156
157 void AddInstruction(HInstruction* instruction);
158 void RemoveInstruction(HInstruction* instruction);
159
160 // Insert `instruction` before/after an existing instruction `cursor`.
161 void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
162 void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
163
164 // Return true if this list contains `instruction`.
165 bool Contains(HInstruction* instruction) const;
166
167 // Return true if `instruction1` is found before `instruction2` in
168 // this instruction list and false otherwise. Aborts if neither
169 // of these instructions is found.
170 bool FoundBefore(const HInstruction* instruction1,
171 const HInstruction* instruction2) const;
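// A minimal usage sketch (hypothetical instructions `a` and `b`, added to `list` in that order):
//   list.FoundBefore(a, b);  // true: `a` precedes `b`.
//   list.FoundBefore(b, a);  // false.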
172
173 bool IsEmpty() const { return first_instruction_ == nullptr; }
174 void Clear() { first_instruction_ = last_instruction_ = nullptr; }
175
176 // Update the block of all instructions to be `block`.
177 void SetBlockOfInstructions(HBasicBlock* block) const;
178
179 void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list);
180 void AddBefore(HInstruction* cursor, const HInstructionList& instruction_list);
181 void Add(const HInstructionList& instruction_list);
182
183 // Return the number of instructions in the list. This is an expensive operation.
184 size_t CountSize() const;
185
186 private:
187 HInstruction* first_instruction_;
188 HInstruction* last_instruction_;
189
190 friend class HBasicBlock;
191 friend class HGraph;
192 friend class HInstruction;
193 friend class HInstructionIterator;
194 friend class HInstructionIteratorHandleChanges;
195 friend class HBackwardInstructionIterator;
196
197 DISALLOW_COPY_AND_ASSIGN(HInstructionList);
198 };
199
200 class ReferenceTypeInfo : ValueObject {
201 public:
202 using TypeHandle = Handle<mirror::Class>;
203
204 static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact);
205
206 static ReferenceTypeInfo Create(TypeHandle type_handle) REQUIRES_SHARED(Locks::mutator_lock_) {
207 return Create(type_handle, type_handle->CannotBeAssignedFromOtherTypes());
208 }
209
210 static ReferenceTypeInfo CreateUnchecked(TypeHandle type_handle, bool is_exact) {
211 return ReferenceTypeInfo(type_handle, is_exact);
212 }
213
214 static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); }
215
216 static bool IsValidHandle(TypeHandle handle) {
217 return handle.GetReference() != nullptr;
218 }
219
220 bool IsValid() const {
221 return IsValidHandle(type_handle_);
222 }
223
224 bool IsExact() const { return is_exact_; }
225
226 bool IsObjectClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
227 DCHECK(IsValid());
228 return GetTypeHandle()->IsObjectClass();
229 }
230
231 bool IsStringClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
232 DCHECK(IsValid());
233 return GetTypeHandle()->IsStringClass();
234 }
235
236 bool IsObjectArray() const REQUIRES_SHARED(Locks::mutator_lock_) {
237 DCHECK(IsValid());
238 return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass();
239 }
240
241 bool IsInterface() const REQUIRES_SHARED(Locks::mutator_lock_) {
242 DCHECK(IsValid());
243 return GetTypeHandle()->IsInterface();
244 }
245
246 bool IsArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
247 DCHECK(IsValid());
248 return GetTypeHandle()->IsArrayClass();
249 }
250
251 bool IsPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
252 DCHECK(IsValid());
253 return GetTypeHandle()->IsPrimitiveArray();
254 }
255
256 bool IsNonPrimitiveArrayClass() const REQUIRES_SHARED(Locks::mutator_lock_) {
257 DCHECK(IsValid());
258 return IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray();
259 }
260
261 bool CanArrayHold(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
262 DCHECK(IsValid());
263 if (!IsExact()) return false;
264 if (!IsArrayClass()) return false;
265 return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get());
266 }
267
268 bool CanArrayHoldValuesOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
269 DCHECK(IsValid());
270 if (!IsExact()) return false;
271 if (!IsArrayClass()) return false;
272 if (!rti.IsArrayClass()) return false;
273 return GetTypeHandle()->GetComponentType()->IsAssignableFrom(
274 rti.GetTypeHandle()->GetComponentType());
275 }
276
277 Handle<mirror::Class> GetTypeHandle() const { return type_handle_; }
278
279 bool IsSupertypeOf(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
280 DCHECK(IsValid());
281 DCHECK(rti.IsValid());
282 return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
283 }
284
285 // Returns true if the type information provides the same amount of detail.
286 // Note that it does not mean that the instructions have the same actual type
287 // (because the type can be the result of a merge).
288 bool IsEqual(ReferenceTypeInfo rti) const REQUIRES_SHARED(Locks::mutator_lock_) {
289 if (!IsValid() && !rti.IsValid()) {
290 // Invalid types are equal.
291 return true;
292 }
293 if (!IsValid() || !rti.IsValid()) {
294 // One is valid, the other not.
295 return false;
296 }
297 return IsExact() == rti.IsExact()
298 && GetTypeHandle().Get() == rti.GetTypeHandle().Get();
299 }
300
301 private:
302 ReferenceTypeInfo() : type_handle_(TypeHandle()), is_exact_(false) {}
303 ReferenceTypeInfo(TypeHandle type_handle, bool is_exact)
304 : type_handle_(type_handle), is_exact_(is_exact) { }
305
306 // The class of the object.
307 TypeHandle type_handle_;
308 // Whether or not the type is exact or a superclass of the actual type.
309 // Whether or not we have any information about this type.
310 bool is_exact_;
311 };
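// A minimal usage sketch for ReferenceTypeInfo (assumes `klass` is a valid Handle<mirror::Class>
// and that the mutator lock is held):
//   ReferenceTypeInfo rti = ReferenceTypeInfo::Create(klass, /* is_exact= */ false);
//   if (rti.IsValid() && rti.IsArrayClass()) {
//     // The component type can then be queried through rti.GetTypeHandle().
//   }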
312
313 std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs);
314
315 class HandleCache {
316 public:
317 explicit HandleCache(VariableSizedHandleScope* handles) : handles_(handles) { }
318
319 VariableSizedHandleScope* GetHandles() { return handles_; }
320
321 template <typename T>
322 MutableHandle<T> NewHandle(T* object) REQUIRES_SHARED(Locks::mutator_lock_) {
323 return handles_->NewHandle(object);
324 }
325
326 template <typename T>
327 MutableHandle<T> NewHandle(ObjPtr<T> object) REQUIRES_SHARED(Locks::mutator_lock_) {
328 return handles_->NewHandle(object);
329 }
330
331 ReferenceTypeInfo::TypeHandle GetObjectClassHandle() {
332 return GetRootHandle(ClassRoot::kJavaLangObject, &object_class_handle_);
333 }
334
335 ReferenceTypeInfo::TypeHandle GetClassClassHandle() {
336 return GetRootHandle(ClassRoot::kJavaLangClass, &class_class_handle_);
337 }
338
339 ReferenceTypeInfo::TypeHandle GetMethodHandleClassHandle() {
340 return GetRootHandle(ClassRoot::kJavaLangInvokeMethodHandleImpl, &method_handle_class_handle_);
341 }
342
343 ReferenceTypeInfo::TypeHandle GetMethodTypeClassHandle() {
344 return GetRootHandle(ClassRoot::kJavaLangInvokeMethodType, &method_type_class_handle_);
345 }
346
347 ReferenceTypeInfo::TypeHandle GetStringClassHandle() {
348 return GetRootHandle(ClassRoot::kJavaLangString, &string_class_handle_);
349 }
350
351 ReferenceTypeInfo::TypeHandle GetThrowableClassHandle() {
352 return GetRootHandle(ClassRoot::kJavaLangThrowable, &throwable_class_handle_);
353 }
354
355
356 private:
357 inline ReferenceTypeInfo::TypeHandle GetRootHandle(ClassRoot class_root,
358 ReferenceTypeInfo::TypeHandle* cache) {
359 if (UNLIKELY(!ReferenceTypeInfo::IsValidHandle(*cache))) {
360 *cache = CreateRootHandle(handles_, class_root);
361 }
362 return *cache;
363 }
364
365 static ReferenceTypeInfo::TypeHandle CreateRootHandle(VariableSizedHandleScope* handles,
366 ClassRoot class_root);
367
368 VariableSizedHandleScope* handles_;
369
370 ReferenceTypeInfo::TypeHandle object_class_handle_;
371 ReferenceTypeInfo::TypeHandle class_class_handle_;
372 ReferenceTypeInfo::TypeHandle method_handle_class_handle_;
373 ReferenceTypeInfo::TypeHandle method_type_class_handle_;
374 ReferenceTypeInfo::TypeHandle string_class_handle_;
375 ReferenceTypeInfo::TypeHandle throwable_class_handle_;
376 };
377
378 // Control-flow graph of a method. Contains a list of basic blocks.
379 class HGraph : public ArenaObject<kArenaAllocGraph> {
380 public:
381 HGraph(ArenaAllocator* allocator,
382 ArenaStack* arena_stack,
383 VariableSizedHandleScope* handles,
384 const DexFile& dex_file,
385 uint32_t method_idx,
386 InstructionSet instruction_set,
387 InvokeType invoke_type = kInvalidInvokeType,
388 bool dead_reference_safe = false,
389 bool debuggable = false,
390 CompilationKind compilation_kind = CompilationKind::kOptimized,
391 int start_instruction_id = 0)
392 : allocator_(allocator),
393 arena_stack_(arena_stack),
394 handle_cache_(handles),
395 blocks_(allocator->Adapter(kArenaAllocBlockList)),
396 reverse_post_order_(allocator->Adapter(kArenaAllocReversePostOrder)),
397 linear_order_(allocator->Adapter(kArenaAllocLinearOrder)),
398 entry_block_(nullptr),
399 exit_block_(nullptr),
400 maximum_number_of_out_vregs_(0),
401 number_of_vregs_(0),
402 number_of_in_vregs_(0),
403 temporaries_vreg_slots_(0),
404 has_bounds_checks_(false),
405 has_try_catch_(false),
406 has_monitor_operations_(false),
407 has_traditional_simd_(false),
408 has_predicated_simd_(false),
409 has_loops_(false),
410 has_irreducible_loops_(false),
411 has_direct_critical_native_call_(false),
412 has_always_throwing_invokes_(false),
413 dead_reference_safe_(dead_reference_safe),
414 debuggable_(debuggable),
415 current_instruction_id_(start_instruction_id),
416 dex_file_(dex_file),
417 method_idx_(method_idx),
418 invoke_type_(invoke_type),
419 in_ssa_form_(false),
420 number_of_cha_guards_(0),
421 instruction_set_(instruction_set),
422 cached_null_constant_(nullptr),
423 cached_int_constants_(std::less<int32_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
424 cached_float_constants_(std::less<int32_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
425 cached_long_constants_(std::less<int64_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
426 cached_double_constants_(std::less<int64_t>(), allocator->Adapter(kArenaAllocConstantsMap)),
427 cached_current_method_(nullptr),
428 art_method_(nullptr),
429 compilation_kind_(compilation_kind),
430 useful_optimizing_(false),
431 cha_single_implementation_list_(allocator->Adapter(kArenaAllocCHA)) {
432 blocks_.reserve(kDefaultNumberOfBlocks);
433 }
434
435 std::ostream& Dump(std::ostream& os,
436 CodeGenerator* codegen,
437 std::optional<std::reference_wrapper<const BlockNamer>> namer = std::nullopt);
438
439 ArenaAllocator* GetAllocator() const { return allocator_; }
440 ArenaStack* GetArenaStack() const { return arena_stack_; }
441
442 HandleCache* GetHandleCache() { return &handle_cache_; }
443
444 const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; }
445
446 // An iterator to only blocks that are still actually in the graph (when
447 // blocks are removed they are replaced with 'nullptr' in GetBlocks to
448 // simplify block-id assignment and avoid memmoves in the block-list).
449 IterationRange<FilterNull<ArenaVector<HBasicBlock*>::const_iterator>> GetActiveBlocks() const {
450 return FilterOutNull(MakeIterationRange(GetBlocks()));
451 }
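// Usage sketch (assuming a valid `graph`): iterate only over blocks still in the graph, e.g.
//   for (HBasicBlock* block : graph->GetActiveBlocks()) { /* `block` is never nullptr here. */ }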
452
453 bool IsInSsaForm() const { return in_ssa_form_; }
454 void SetInSsaForm() { in_ssa_form_ = true; }
455
456 HBasicBlock* GetEntryBlock() const { return entry_block_; }
457 HBasicBlock* GetExitBlock() const { return exit_block_; }
458 bool HasExitBlock() const { return exit_block_ != nullptr; }
459
460 void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; }
461 void SetExitBlock(HBasicBlock* block) { exit_block_ = block; }
462
463 void AddBlock(HBasicBlock* block);
464
465 void ComputeDominanceInformation();
466 void ClearDominanceInformation();
467 void ClearLoopInformation();
468 void FindBackEdges(ArenaBitVector* visited);
469 GraphAnalysisResult BuildDominatorTree();
470 GraphAnalysisResult RecomputeDominatorTree();
471 void SimplifyCFG();
472 void SimplifyCatchBlocks();
473
474 // Analyze all natural loops in this graph. Returns a code specifying that it
475 // was successful or the reason for failure. The method will fail if a loop
476 // is a throw-catch loop, i.e. the header is a catch block.
477 GraphAnalysisResult AnalyzeLoops() const;
478
479 // Iterate over blocks to compute try block membership. Needs reverse post
480 // order and loop information.
481 void ComputeTryBlockInformation();
482
483 // Inline this graph in `outer_graph`, replacing the given `invoke` instruction.
484 // Returns the instruction to replace the invoke expression or null if the
485 // invoke is for a void method. Note that the caller is responsible for replacing
486 // and removing the invoke instruction.
487 HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke);
488
489 // Update the loop and try membership of `block`, which was spawned from `reference`.
490 // In case `reference` is a back edge, `replace_if_back_edge` indicates whether `block`
491 // should be the new back edge.
492 // `has_more_specific_try_catch_info` will be set to true when inlining a try catch.
493 void UpdateLoopAndTryInformationOfNewBlock(HBasicBlock* block,
494 HBasicBlock* reference,
495 bool replace_if_back_edge,
496 bool has_more_specific_try_catch_info = false);
497
498 // Adds a couple of blocks to test whether the loop body is entered and to
499 // hold deoptimization instructions, etc.
500 void TransformLoopHeaderForBCE(HBasicBlock* header);
501
502 // Adds a new loop directly after the loop with the given header and exit.
503 // Returns the new preheader.
504 HBasicBlock* TransformLoopForVectorization(HBasicBlock* header,
505 HBasicBlock* body,
506 HBasicBlock* exit);
507
508 // Removes `block` from the graph. Assumes `block` has been disconnected from
509 // other blocks and has no instructions or phis.
510 void DeleteDeadEmptyBlock(HBasicBlock* block);
511
512 // Splits the edge between `block` and `successor` while preserving the
513 // indices in the predecessor/successor lists. If there are multiple edges
514 // between the blocks, the lowest indices are used.
515 // Returns the new block which is empty and has the same dex pc as `successor`.
516 HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor);
517
518 void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor);
519
520 // Splits the edge between `block` and `successor` and then updates the graph's RPO to keep
521 // consistency without recomputing the whole graph.
522 HBasicBlock* SplitEdgeAndUpdateRPO(HBasicBlock* block, HBasicBlock* successor);
523
524 void OrderLoopHeaderPredecessors(HBasicBlock* header);
525
526 // Transform a loop into a format with a single preheader.
527 //
528 // Each phi in the header should be split: original one in the header should only hold
529 // inputs reachable from the back edges and a single input from the preheader. The newly created
530 // phi in the preheader should collate the inputs from the original multiple incoming blocks.
531 //
532 // Loops in the graph typically have a single preheader, so this method is used to "repair" loops
533 // that no longer have this property.
534 void TransformLoopToSinglePreheaderFormat(HBasicBlock* header);
535
536 void SimplifyLoop(HBasicBlock* header);
537
538 int32_t GetNextInstructionId() {
539 CHECK_NE(current_instruction_id_, INT32_MAX);
540 return current_instruction_id_++;
541 }
542
543 int32_t GetCurrentInstructionId() const {
544 return current_instruction_id_;
545 }
546
547 void SetCurrentInstructionId(int32_t id) {
548 CHECK_GE(id, current_instruction_id_);
549 current_instruction_id_ = id;
550 }
551
552 uint16_t GetMaximumNumberOfOutVRegs() const {
553 return maximum_number_of_out_vregs_;
554 }
555
556 void SetMaximumNumberOfOutVRegs(uint16_t new_value) {
557 maximum_number_of_out_vregs_ = new_value;
558 }
559
560 void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) {
561 maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value);
562 }
563
564 void UpdateTemporariesVRegSlots(size_t slots) {
565 temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_);
566 }
567
568 size_t GetTemporariesVRegSlots() const {
569 DCHECK(!in_ssa_form_);
570 return temporaries_vreg_slots_;
571 }
572
573 void SetNumberOfVRegs(uint16_t number_of_vregs) {
574 number_of_vregs_ = number_of_vregs;
575 }
576
577 uint16_t GetNumberOfVRegs() const {
578 return number_of_vregs_;
579 }
580
581 void SetNumberOfInVRegs(uint16_t value) {
582 number_of_in_vregs_ = value;
583 }
584
585 uint16_t GetNumberOfInVRegs() const {
586 return number_of_in_vregs_;
587 }
588
589 uint16_t GetNumberOfLocalVRegs() const {
590 DCHECK(!in_ssa_form_);
591 return number_of_vregs_ - number_of_in_vregs_;
592 }
593
594 const ArenaVector<HBasicBlock*>& GetReversePostOrder() const {
595 return reverse_post_order_;
596 }
597
598 ArrayRef<HBasicBlock* const> GetReversePostOrderSkipEntryBlock() const {
599 DCHECK(GetReversePostOrder()[0] == entry_block_);
600 return ArrayRef<HBasicBlock* const>(GetReversePostOrder()).SubArray(1);
601 }
602
603 IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetPostOrder() const {
604 return ReverseRange(GetReversePostOrder());
605 }
606
607 const ArenaVector<HBasicBlock*>& GetLinearOrder() const {
608 return linear_order_;
609 }
610
611 IterationRange<ArenaVector<HBasicBlock*>::const_reverse_iterator> GetLinearPostOrder() const {
612 return ReverseRange(GetLinearOrder());
613 }
614
615 bool HasBoundsChecks() const {
616 return has_bounds_checks_;
617 }
618
619 void SetHasBoundsChecks(bool value) {
620 has_bounds_checks_ = value;
621 }
622
623 // Is the code known to be robust against eliminating dead references
624 // and the effects of early finalization?
625 bool IsDeadReferenceSafe() const { return dead_reference_safe_; }
626
627 void MarkDeadReferenceUnsafe() { dead_reference_safe_ = false; }
628
629 bool IsDebuggable() const { return debuggable_; }
630
631 // Returns a constant of the given type and value. If it does not exist
632 // already, it is created and inserted into the graph. This method is only for
633 // integral types.
634 HConstant* GetConstant(DataType::Type type, int64_t value, uint32_t dex_pc = kNoDexPc);
635
636 // TODO: This is problematic for the consistency of reference type propagation
637 // because it can be created anytime after the pass and thus it will be left
638 // with an invalid type.
639 HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc);
640
641 HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) {
642 return CreateConstant(value, &cached_int_constants_, dex_pc);
643 }
644 HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) {
645 return CreateConstant(value, &cached_long_constants_, dex_pc);
646 }
647 HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) {
648 return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc);
649 }
650 HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) {
651 return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc);
652 }
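// Usage sketch: constants are cached per graph, so repeated requests for the same value return
// the same instruction (assuming a valid `graph`):
//   HIntConstant* c1 = graph->GetIntConstant(42);
//   HIntConstant* c2 = graph->GetIntConstant(42);
//   // Here c1 == c2, unless the cached constant was removed from the graph in between.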
653
654 HCurrentMethod* GetCurrentMethod();
655
656 const DexFile& GetDexFile() const {
657 return dex_file_;
658 }
659
660 uint32_t GetMethodIdx() const {
661 return method_idx_;
662 }
663
664 // Get the method name (without the signature), e.g. "<init>"
665 const char* GetMethodName() const;
666
667 // Get the pretty method name (class + name + optionally signature).
668 std::string PrettyMethod(bool with_signature = true) const;
669
670 InvokeType GetInvokeType() const {
671 return invoke_type_;
672 }
673
674 InstructionSet GetInstructionSet() const {
675 return instruction_set_;
676 }
677
678 bool IsCompilingOsr() const { return compilation_kind_ == CompilationKind::kOsr; }
679
680 bool IsCompilingBaseline() const { return compilation_kind_ == CompilationKind::kBaseline; }
681
682 CompilationKind GetCompilationKind() const { return compilation_kind_; }
683
684 ArenaSet<ArtMethod*>& GetCHASingleImplementationList() {
685 return cha_single_implementation_list_;
686 }
687
688 // In case of OSR we intend to use SuspendChecks as an entry point to the
689 // function; for debuggable graphs we might deoptimize to interpreter from
690 // SuspendChecks. In these cases we should always generate code for them.
691 bool SuspendChecksAreAllowedToNoOp() const {
692 return !IsDebuggable() && !IsCompilingOsr();
693 }
694
695 void AddCHASingleImplementationDependency(ArtMethod* method) {
696 cha_single_implementation_list_.insert(method);
697 }
698
699 bool HasShouldDeoptimizeFlag() const {
700 return number_of_cha_guards_ != 0 || debuggable_;
701 }
702
703 bool HasTryCatch() const { return has_try_catch_; }
704 void SetHasTryCatch(bool value) { has_try_catch_ = value; }
705
706 bool HasMonitorOperations() const { return has_monitor_operations_; }
707 void SetHasMonitorOperations(bool value) { has_monitor_operations_ = value; }
708
709 bool HasTraditionalSIMD() { return has_traditional_simd_; }
710 void SetHasTraditionalSIMD(bool value) { has_traditional_simd_ = value; }
711
712 bool HasPredicatedSIMD() { return has_predicated_simd_; }
713 void SetHasPredicatedSIMD(bool value) { has_predicated_simd_ = value; }
714
715 bool HasSIMD() const { return has_traditional_simd_ || has_predicated_simd_; }
716
717 bool HasLoops() const { return has_loops_; }
718 void SetHasLoops(bool value) { has_loops_ = value; }
719
720 bool HasIrreducibleLoops() const { return has_irreducible_loops_; }
721 void SetHasIrreducibleLoops(bool value) { has_irreducible_loops_ = value; }
722
723 bool HasDirectCriticalNativeCall() const { return has_direct_critical_native_call_; }
724 void SetHasDirectCriticalNativeCall(bool value) { has_direct_critical_native_call_ = value; }
725
726 bool HasAlwaysThrowingInvokes() const { return has_always_throwing_invokes_; }
727 void SetHasAlwaysThrowingInvokes(bool value) { has_always_throwing_invokes_ = value; }
728
729 ArtMethod* GetArtMethod() const { return art_method_; }
730 void SetArtMethod(ArtMethod* method) { art_method_ = method; }
731
732 void SetProfilingInfo(ProfilingInfo* info) { profiling_info_ = info; }
733 ProfilingInfo* GetProfilingInfo() const { return profiling_info_; }
734
735 // Returns an instruction with the opposite Boolean value from 'cond'.
736 // The instruction has been inserted into the graph, either as a constant, or
737 // before cursor.
738 HInstruction* InsertOppositeCondition(HInstruction* cond, HInstruction* cursor);
739
740 ReferenceTypeInfo GetInexactObjectRti() {
741 return ReferenceTypeInfo::Create(handle_cache_.GetObjectClassHandle(), /* is_exact= */ false);
742 }
743
744 uint32_t GetNumberOfCHAGuards() const { return number_of_cha_guards_; }
745 void SetNumberOfCHAGuards(uint32_t num) { number_of_cha_guards_ = num; }
746 void IncrementNumberOfCHAGuards() { number_of_cha_guards_++; }
747
748 void SetUsefulOptimizing() { useful_optimizing_ = true; }
749 bool IsUsefulOptimizing() const { return useful_optimizing_; }
750
751 private:
752 void RemoveDeadBlocksInstructionsAsUsersAndDisconnect(const ArenaBitVector& visited) const;
753 void RemoveDeadBlocks(const ArenaBitVector& visited);
754
755 template <class InstructionType, typename ValueType>
756 InstructionType* CreateConstant(ValueType value,
757 ArenaSafeMap<ValueType, InstructionType*>* cache,
758 uint32_t dex_pc = kNoDexPc) {
759 // Try to find an existing constant of the given value.
760 InstructionType* constant = nullptr;
761 auto cached_constant = cache->find(value);
762 if (cached_constant != cache->end()) {
763 constant = cached_constant->second;
764 }
765
766 // If not found or previously deleted, create and cache a new instruction.
767 // Don't bother reviving a previously deleted instruction, for simplicity.
768 if (constant == nullptr || constant->GetBlock() == nullptr) {
769 constant = new (allocator_) InstructionType(value, dex_pc);
770 cache->Overwrite(value, constant);
771 InsertConstant(constant);
772 }
773 return constant;
774 }
775
776 void InsertConstant(HConstant* instruction);
777
778 // Cache a float constant into the graph. This method should only be
779 // called by the SsaBuilder when creating "equivalent" instructions.
780 void CacheFloatConstant(HFloatConstant* constant);
781
782 // See CacheFloatConstant comment.
783 void CacheDoubleConstant(HDoubleConstant* constant);
784
785 ArenaAllocator* const allocator_;
786 ArenaStack* const arena_stack_;
787
788 HandleCache handle_cache_;
789
790 // List of blocks in insertion order.
791 ArenaVector<HBasicBlock*> blocks_;
792
793 // List of blocks to perform a reverse post order tree traversal.
794 ArenaVector<HBasicBlock*> reverse_post_order_;
795
796 // List of blocks to perform a linear order tree traversal. Unlike the reverse
797 // post order, this order is not incrementally kept up-to-date.
798 ArenaVector<HBasicBlock*> linear_order_;
799
800 HBasicBlock* entry_block_;
801 HBasicBlock* exit_block_;
802
803 // The maximum number of virtual register arguments passed to an HInvoke in this graph.
804 uint16_t maximum_number_of_out_vregs_;
805
806 // The number of virtual registers in this method. Includes the parameters.
807 uint16_t number_of_vregs_;
808
809 // The number of virtual registers used by parameters of this method.
810 uint16_t number_of_in_vregs_;
811
812 // Number of vreg size slots that the temporaries use (used in baseline compiler).
813 size_t temporaries_vreg_slots_;
814
815 // Flag whether there are bounds checks in the graph. We can skip
816 // BCE if it's false.
817 bool has_bounds_checks_;
818
819 // Flag whether there are try/catch blocks in the graph. We will skip
820 // try/catch-related passes if it's false.
821 bool has_try_catch_;
822
823 // Flag whether there are any HMonitorOperation in the graph. If yes this will mandate
824 // DexRegisterMap to be present to allow deadlock analysis for non-debuggable code.
825 bool has_monitor_operations_;
826
827 // Flags whether SIMD (traditional or predicated) instructions appear in the graph.
828 // If either is true, the code generators may have to be more careful spilling the wider
829 // contents of SIMD registers.
830 bool has_traditional_simd_;
831 bool has_predicated_simd_;
832
833 // Flag whether there are any loops in the graph. We can skip loop
834 // optimization if it's false.
835 bool has_loops_;
836
837 // Flag whether there are any irreducible loops in the graph.
838 bool has_irreducible_loops_;
839
840 // Flag whether there are any direct calls to native code registered
841 // for @CriticalNative methods.
842 bool has_direct_critical_native_call_;
843
844 // Flag whether the graph contains invokes that always throw.
845 bool has_always_throwing_invokes_;
846
847 // Is the code known to be robust against eliminating dead references
848 // and the effects of early finalization? If false, dead reference variables
849 // are kept if they might be visible to the garbage collector.
850 // Currently this means that the class was declared to be dead-reference-safe,
851 // the method accesses no reachability-sensitive fields or data, and the same
852 // is true for any methods that were inlined into the current one.
853 bool dead_reference_safe_;
854
855 // Indicates whether the graph should be compiled in a way that
856 // ensures full debuggability. If false, we can apply more
857 // aggressive optimizations that may limit the level of debugging.
858 const bool debuggable_;
859
860 // The current id to assign to a newly added instruction. See HInstruction.id_.
861 int32_t current_instruction_id_;
862
863 // The dex file this method comes from.
864 const DexFile& dex_file_;
865
866 // The method index in the dex file.
867 const uint32_t method_idx_;
868
869 // If inlined, this encodes how the callee is being invoked.
870 const InvokeType invoke_type_;
871
872 // Whether the graph has been transformed to SSA form. Only used
873 // in debug mode to ensure we are not using properties only valid
874 // for non-SSA form (like the number of temporaries).
875 bool in_ssa_form_;
876
877 // Number of CHA guards in the graph. Used to short-circuit the
878 // CHA guard optimization pass when there is no CHA guard left.
879 uint32_t number_of_cha_guards_;
880
881 const InstructionSet instruction_set_;
882
883 // Cached constants.
884 HNullConstant* cached_null_constant_;
885 ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_;
886 ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_;
887 ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_;
888 ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_;
889
890 HCurrentMethod* cached_current_method_;
891
892 // The ArtMethod this graph is for. Note that for AOT, it may be null,
893 // for example for methods whose declaring class could not be resolved
894 // (such as when the superclass could not be found).
895 ArtMethod* art_method_;
896
897 // The `ProfilingInfo` associated with the method being compiled.
898 ProfilingInfo* profiling_info_;
899
900 // How we are compiling the graph: either optimized, osr, or baseline.
901 // For osr, we will make all loops seen as irreducible and emit special
902 // stack maps to mark compiled code entries which the interpreter can
903 // directly jump to.
904 const CompilationKind compilation_kind_;
905
906 // Whether, after compiling with the baseline compiler, it is still useful to
907 // re-optimize this method.
908 bool useful_optimizing_;
909
910 // List of methods that are assumed to have single implementation.
911 ArenaSet<ArtMethod*> cha_single_implementation_list_;
912
913 friend class SsaBuilder; // For caching constants.
914 friend class SsaLivenessAnalysis; // For the linear order.
915 friend class HInliner; // For the reverse post order.
916 ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1);
917 DISALLOW_COPY_AND_ASSIGN(HGraph);
918 };
919
920 class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> {
921 public:
922 HLoopInformation(HBasicBlock* header, HGraph* graph)
923 : header_(header),
924 suspend_check_(nullptr),
925 irreducible_(false),
926 contains_irreducible_loop_(false),
927 back_edges_(graph->GetAllocator()->Adapter(kArenaAllocLoopInfoBackEdges)),
928 // Make bit vector growable, as the number of blocks may change.
929 blocks_(graph->GetAllocator(),
930 graph->GetBlocks().size(),
931 true,
932 kArenaAllocLoopInfoBackEdges) {
933 back_edges_.reserve(kDefaultNumberOfBackEdges);
934 }
935
936 bool IsIrreducible() const { return irreducible_; }
937 bool ContainsIrreducibleLoop() const { return contains_irreducible_loop_; }
938
939 void Dump(std::ostream& os);
940
941 HBasicBlock* GetHeader() const {
942 return header_;
943 }
944
945 void SetHeader(HBasicBlock* block) {
946 header_ = block;
947 }
948
949 HSuspendCheck* GetSuspendCheck() const { return suspend_check_; }
950 void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; }
951 bool HasSuspendCheck() const { return suspend_check_ != nullptr; }
952
953 void AddBackEdge(HBasicBlock* back_edge) {
954 back_edges_.push_back(back_edge);
955 }
956
957 void RemoveBackEdge(HBasicBlock* back_edge) {
958 RemoveElement(back_edges_, back_edge);
959 }
960
961 bool IsBackEdge(const HBasicBlock& block) const {
962 return ContainsElement(back_edges_, &block);
963 }
964
965 size_t NumberOfBackEdges() const {
966 return back_edges_.size();
967 }
968
969 HBasicBlock* GetPreHeader() const;
970
971 const ArenaVector<HBasicBlock*>& GetBackEdges() const {
972 return back_edges_;
973 }
974
975 // Returns the lifetime position of the back edge that has the
976 // greatest lifetime position.
977 size_t GetLifetimeEnd() const;
978
979 void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) {
980 ReplaceElement(back_edges_, existing, new_back_edge);
981 }
982
983 // Finds blocks that are part of this loop.
984 void Populate();
985
986 // Updates the block population of this loop and, recursively, of all its outer loops after
987 // the population of the inner loop has been updated.
988 void PopulateInnerLoopUpwards(HLoopInformation* inner_loop);
989
990 // Returns whether this loop information contains `block`.
991 // Note that this loop information *must* be populated before entering this function.
992 bool Contains(const HBasicBlock& block) const;
993
994 // Returns whether this loop information is an inner loop of `other`.
995 // Note that `other` *must* be populated before entering this function.
996 bool IsIn(const HLoopInformation& other) const;
997
998 // Returns true if instruction is not defined within this loop.
999 bool IsDefinedOutOfTheLoop(HInstruction* instruction) const;
1000
1001 const ArenaBitVector& GetBlocks() const { return blocks_; }
1002
1003 void Add(HBasicBlock* block);
1004 void Remove(HBasicBlock* block);
1005
1006 void ClearAllBlocks() {
1007 blocks_.ClearAllBits();
1008 }
1009
1010 bool HasBackEdgeNotDominatedByHeader() const;
1011
1012 bool IsPopulated() const {
1013 return blocks_.GetHighestBitSet() != -1;
1014 }
1015
1016 bool DominatesAllBackEdges(HBasicBlock* block);
1017
1018 bool HasExitEdge() const;
1019
1020 // Resets back edge and blocks-in-loop data.
1021 void ResetBasicBlockData() {
1022 back_edges_.clear();
1023 ClearAllBlocks();
1024 }
1025
1026 private:
1027 // Internal recursive implementation of `Populate`.
1028 void PopulateRecursive(HBasicBlock* block);
1029 void PopulateIrreducibleRecursive(HBasicBlock* block, ArenaBitVector* finalized);
1030
1031 HBasicBlock* header_;
1032 HSuspendCheck* suspend_check_;
1033 bool irreducible_;
1034 bool contains_irreducible_loop_;
1035 ArenaVector<HBasicBlock*> back_edges_;
1036 ArenaBitVector blocks_;
1037
1038 DISALLOW_COPY_AND_ASSIGN(HLoopInformation);
1039 };
1040
1041 // Stores try/catch information for basic blocks.
1042 // Note that HGraph is constructed so that catch blocks cannot simultaneously
1043 // be try blocks.
1044 class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> {
1045 public:
1046 // Try block information constructor.
1047 explicit TryCatchInformation(const HTryBoundary& try_entry)
1048 : try_entry_(&try_entry),
1049 catch_dex_file_(nullptr),
1050 catch_type_index_(dex::TypeIndex::Invalid()) {
1051 DCHECK(try_entry_ != nullptr);
1052 }
1053
1054 // Catch block information constructor.
1055 TryCatchInformation(dex::TypeIndex catch_type_index, const DexFile& dex_file)
1056 : try_entry_(nullptr),
1057 catch_dex_file_(&dex_file),
1058 catch_type_index_(catch_type_index) {}
1059
1060 bool IsTryBlock() const { return try_entry_ != nullptr; }
1061
1062 const HTryBoundary& GetTryEntry() const {
1063 DCHECK(IsTryBlock());
1064 return *try_entry_;
1065 }
1066
1067 bool IsCatchBlock() const { return catch_dex_file_ != nullptr; }
1068
1069 bool IsValidTypeIndex() const {
1070 DCHECK(IsCatchBlock());
1071 return catch_type_index_.IsValid();
1072 }
1073
1074 dex::TypeIndex GetCatchTypeIndex() const {
1075 DCHECK(IsCatchBlock());
1076 return catch_type_index_;
1077 }
1078
1079 const DexFile& GetCatchDexFile() const {
1080 DCHECK(IsCatchBlock());
1081 return *catch_dex_file_;
1082 }
1083
1084 void SetInvalidTypeIndex() {
1085 catch_type_index_ = dex::TypeIndex::Invalid();
1086 }
1087
1088 private:
1089 // One of possibly several TryBoundary instructions entering the block's try.
1090 // Only set for try blocks.
1091 const HTryBoundary* try_entry_;
1092
1093 // Exception type information. Only set for catch blocks.
1094 const DexFile* catch_dex_file_;
1095 dex::TypeIndex catch_type_index_;
1096 };
1097
1098 static constexpr size_t kNoLifetime = -1;
1099 static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1);
1100
1101 // A block in a method. Contains the list of instructions represented
1102 // as a double linked list. Each block knows its predecessors and
1103 // successors.
1104
1105 class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
1106 public:
1107 explicit HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc)
1108 : graph_(graph),
1109 predecessors_(graph->GetAllocator()->Adapter(kArenaAllocPredecessors)),
1110 successors_(graph->GetAllocator()->Adapter(kArenaAllocSuccessors)),
1111 loop_information_(nullptr),
1112 dominator_(nullptr),
1113 dominated_blocks_(graph->GetAllocator()->Adapter(kArenaAllocDominated)),
1114 block_id_(kInvalidBlockId),
1115 dex_pc_(dex_pc),
1116 lifetime_start_(kNoLifetime),
1117 lifetime_end_(kNoLifetime),
1118 try_catch_information_(nullptr) {
1119 predecessors_.reserve(kDefaultNumberOfPredecessors);
1120 successors_.reserve(kDefaultNumberOfSuccessors);
1121 dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks);
1122 }
1123
1124 const ArenaVector<HBasicBlock*>& GetPredecessors() const {
1125 return predecessors_;
1126 }
1127
1128 size_t GetNumberOfPredecessors() const {
1129 return GetPredecessors().size();
1130 }
1131
1132 const ArenaVector<HBasicBlock*>& GetSuccessors() const {
1133 return successors_;
1134 }
1135
1136 ArrayRef<HBasicBlock* const> GetNormalSuccessors() const;
1137 ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const;
1138
1139 bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) {
1140 return ContainsElement(successors_, block, start_from);
1141 }
1142
1143 const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const {
1144 return dominated_blocks_;
1145 }
1146
1147 bool IsEntryBlock() const {
1148 return graph_->GetEntryBlock() == this;
1149 }
1150
1151 bool IsExitBlock() const {
1152 return graph_->GetExitBlock() == this;
1153 }
1154
1155 bool IsSingleGoto() const;
1156 bool IsSingleReturn() const;
1157 bool IsSingleReturnOrReturnVoidAllowingPhis() const;
1158 bool IsSingleTryBoundary() const;
1159
1160 // Returns true if this block emits nothing but a jump.
1161 bool IsSingleJump() const {
1162 HLoopInformation* loop_info = GetLoopInformation();
1163 return (IsSingleGoto() || IsSingleTryBoundary())
1164 // Back edges generate a suspend check.
1165 && (loop_info == nullptr || !loop_info->IsBackEdge(*this));
1166 }
1167
1168 void AddBackEdge(HBasicBlock* back_edge) {
1169 if (loop_information_ == nullptr) {
1170 loop_information_ = new (graph_->GetAllocator()) HLoopInformation(this, graph_);
1171 }
1172 DCHECK_EQ(loop_information_->GetHeader(), this);
1173 loop_information_->AddBackEdge(back_edge);
1174 }
1175
1176 // Registers a back edge; if the block was not a loop header before the call, associates a
1177 // newly created loop info with it.
1178 //
1179 // Used in SuperblockCloner to preserve the LoopInformation object instead of resetting loop
1180 // info for all blocks during back edge recalculation.
1181 void AddBackEdgeWhileUpdating(HBasicBlock* back_edge) {
1182 if (loop_information_ == nullptr || loop_information_->GetHeader() != this) {
1183 loop_information_ = new (graph_->GetAllocator()) HLoopInformation(this, graph_);
1184 }
1185 loop_information_->AddBackEdge(back_edge);
1186 }
1187
1188 HGraph* GetGraph() const { return graph_; }
1189 void SetGraph(HGraph* graph) { graph_ = graph; }
1190
1191 uint32_t GetBlockId() const { return block_id_; }
1192 void SetBlockId(int id) { block_id_ = id; }
1193 uint32_t GetDexPc() const { return dex_pc_; }
1194
1195 HBasicBlock* GetDominator() const { return dominator_; }
1196 void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; }
1197 void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); }
1198
1199 void RemoveDominatedBlock(HBasicBlock* block) {
1200 RemoveElement(dominated_blocks_, block);
1201 }
1202
1203 void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) {
1204 ReplaceElement(dominated_blocks_, existing, new_block);
1205 }
1206
1207 void ClearDominanceInformation();
1208
1209 int NumberOfBackEdges() const {
1210 return IsLoopHeader() ? loop_information_->NumberOfBackEdges() : 0;
1211 }
1212
1213 HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; }
1214 HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; }
1215 const HInstructionList& GetInstructions() const { return instructions_; }
1216 HInstruction* GetFirstPhi() const { return phis_.first_instruction_; }
1217 HInstruction* GetLastPhi() const { return phis_.last_instruction_; }
1218 const HInstructionList& GetPhis() const { return phis_; }
1219
1220 HInstruction* GetFirstInstructionDisregardMoves() const;
1221
1222 void AddSuccessor(HBasicBlock* block) {
1223 successors_.push_back(block);
1224 block->predecessors_.push_back(this);
1225 }
1226
1227 void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) {
1228 size_t successor_index = GetSuccessorIndexOf(existing);
1229 existing->RemovePredecessor(this);
1230 new_block->predecessors_.push_back(this);
1231 successors_[successor_index] = new_block;
1232 }
1233
1234 void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) {
1235 size_t predecessor_index = GetPredecessorIndexOf(existing);
1236 existing->RemoveSuccessor(this);
1237 new_block->successors_.push_back(this);
1238 predecessors_[predecessor_index] = new_block;
1239 }
1240
1241 // Insert `this` between `predecessor` and `successor`. This method
1242 // preserves the indices, and will update the first edge found between
1243 // `predecessor` and `successor`.
1244 void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) {
1245 size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor);
1246 size_t successor_index = predecessor->GetSuccessorIndexOf(successor);
1247 successor->predecessors_[predecessor_index] = this;
1248 predecessor->successors_[successor_index] = this;
1249 successors_.push_back(successor);
1250 predecessors_.push_back(predecessor);
1251 }
1252
1253 void RemovePredecessor(HBasicBlock* block) {
1254 predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block));
1255 }
1256
1257 void RemoveSuccessor(HBasicBlock* block) {
1258 successors_.erase(successors_.begin() + GetSuccessorIndexOf(block));
1259 }
1260
1261 void ClearAllPredecessors() {
1262 predecessors_.clear();
1263 }
1264
1265 void AddPredecessor(HBasicBlock* block) {
1266 predecessors_.push_back(block);
1267 block->successors_.push_back(this);
1268 }
1269
1270 void SwapPredecessors() {
1271 DCHECK_EQ(predecessors_.size(), 2u);
1272 std::swap(predecessors_[0], predecessors_[1]);
1273 }
1274
1275 void SwapSuccessors() {
1276 DCHECK_EQ(successors_.size(), 2u);
1277 std::swap(successors_[0], successors_[1]);
1278 }
1279
1280 size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const {
1281 return IndexOfElement(predecessors_, predecessor);
1282 }
1283
1284 size_t GetSuccessorIndexOf(HBasicBlock* successor) const {
1285 return IndexOfElement(successors_, successor);
1286 }
1287
1288 HBasicBlock* GetSinglePredecessor() const {
1289 DCHECK_EQ(GetPredecessors().size(), 1u);
1290 return GetPredecessors()[0];
1291 }
1292
1293 HBasicBlock* GetSingleSuccessor() const {
1294 DCHECK_EQ(GetSuccessors().size(), 1u);
1295 return GetSuccessors()[0];
1296 }
1297
1298 // Returns whether the first occurrence of `predecessor` in the list of
1299 // predecessors is at index `idx`.
1300 bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const {
1301 DCHECK_EQ(GetPredecessors()[idx], predecessor);
1302 return GetPredecessorIndexOf(predecessor) == idx;
1303 }
1304
1305 // Create a new block between this block and its predecessors. The new block
1306 // is added to the graph, all predecessor edges are relinked to it and an edge
1307 // is created to `this`. Returns the new empty block. Reverse post order or
1308 // loop and try/catch information are not updated.
1309 HBasicBlock* CreateImmediateDominator();
1310
1311 // Split the block into two blocks just before `cursor`. Returns the newly
1312 // created, latter block. Note that this method will add the block to the
1313 // graph, create a Goto at the end of the former block and will create an edge
1314 // between the blocks. It will not, however, update the reverse post order or
1315 // loop and try/catch information.
1316 HBasicBlock* SplitBefore(HInstruction* cursor, bool require_graph_not_in_ssa_form = true);
1317
1318 // Split the block into two blocks just before `cursor`. Returns the newly
1319 // created block. Note that this method just updates raw block information,
1320 // like predecessors, successors, dominators, and instruction list. It does not
1321 // update the graph, reverse post order, loop information, nor make sure the
1322 // blocks are consistent (for example ending with a control flow instruction).
1323 HBasicBlock* SplitBeforeForInlining(HInstruction* cursor);
1324
1325 // Similar to `SplitBeforeForInlining` but does it after `cursor`.
1326 HBasicBlock* SplitAfterForInlining(HInstruction* cursor);
1327
1328 // Merge `other` at the end of `this`. Successors and dominated blocks of
1329 // `other` are changed to be successors and dominated blocks of `this`. Note
1330 // that this method does not update the graph, reverse post order, loop
1331 // information, nor make sure the blocks are consistent (for example ending
1332 // with a control flow instruction).
1333 void MergeWithInlined(HBasicBlock* other);
1334
1335 // Replace `this` with `other`. Predecessors, successors, and dominated blocks
1336 // of `this` are moved to `other`.
1337 // Note that this method does not update the graph, reverse post order, loop
1338 // information, nor make sure the blocks are consistent (for example ending
1339 // with a control flow instruction).
1340 void ReplaceWith(HBasicBlock* other);
1341
1342 // Merges the instructions of `other` at the end of `this`.
1343 void MergeInstructionsWith(HBasicBlock* other);
1344
1345 // Merge `other` at the end of `this`. This method updates loops, reverse post
1346 // order, links to predecessors, successors, dominators and deletes the block
1347 // from the graph. The two blocks must be successive, i.e. `this` the only
1348 // predecessor of `other` and vice versa.
1349 void MergeWith(HBasicBlock* other);
1350
1351 // Disconnects `this` from all its predecessors, successors and dominator,
1352 // removes it from all loops it is included in and eventually from the graph.
1353 // The block must not dominate any other block. Predecessors and successors
1354 // are safely updated.
1355 void DisconnectAndDelete();
1356
1357 // Disconnects `this` from all its successors and updates their phis, if the successors have them.
1358 // If `visited` is provided, it will use the information to know if a successor is reachable and
1359 // skip updating those phis.
1360 void DisconnectFromSuccessors(const ArenaBitVector* visited = nullptr);
1361
1362 // Removes the catch phi uses of the instructions in `this`, and then remove the instruction
1363 // itself. If `building_dominator_tree` is true, it will not remove the instruction as user, since
1364 // we do it in a previous step. This is a special case for building up the dominator tree: we want
1365 // to eliminate uses before inputs but we don't have domination information, so we remove all
1366 // connections from input/uses first before removing any instruction.
1367 // This method assumes the instructions have been removed from all users with the exception of
1368 // catch phis because of missing exceptional edges in the graph.
1369 void RemoveCatchPhiUsesAndInstruction(bool building_dominator_tree);
1370
1371 void AddInstruction(HInstruction* instruction);
1372 // Insert `instruction` before/after an existing instruction `cursor`.
1373 void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
1374 void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
1375 // Replace phi `initial` with `replacement` within this block.
1376 void ReplaceAndRemovePhiWith(HPhi* initial, HPhi* replacement);
1377 // Replace instruction `initial` with `replacement` within this block.
1378 void ReplaceAndRemoveInstructionWith(HInstruction* initial,
1379 HInstruction* replacement);
1380 void AddPhi(HPhi* phi);
1381 void InsertPhiAfter(HPhi* instruction, HPhi* cursor);
1382 // RemoveInstruction and RemovePhi delete a given instruction from the respective
1383 // instruction list. With 'ensure_safety' set to true, it verifies that the
1384 // instruction is not in use and removes it from the use lists of its inputs.
1385 void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true);
1386 void RemovePhi(HPhi* phi, bool ensure_safety = true);
1387 void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true);
1388
IsLoopHeader()1389 bool IsLoopHeader() const {
1390 return IsInLoop() && (loop_information_->GetHeader() == this);
1391 }
1392
IsLoopPreHeaderFirstPredecessor()1393 bool IsLoopPreHeaderFirstPredecessor() const {
1394 DCHECK(IsLoopHeader());
1395 return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader();
1396 }
1397
IsFirstPredecessorBackEdge()1398 bool IsFirstPredecessorBackEdge() const {
1399 DCHECK(IsLoopHeader());
1400 return GetLoopInformation()->IsBackEdge(*GetPredecessors()[0]);
1401 }
1402
GetLoopInformation()1403 HLoopInformation* GetLoopInformation() const {
1404 return loop_information_;
1405 }
1406
1407 // Set the loop_information_ on this block. Overrides the current
1408 // loop_information if it is an outer loop of the passed loop information.
1409 // Note that this method is called while creating the loop information.
SetInLoop(HLoopInformation * info)1410 void SetInLoop(HLoopInformation* info) {
1411 if (IsLoopHeader()) {
1412 // Nothing to do. This just means `info` is an outer loop.
1413 } else if (!IsInLoop()) {
1414 loop_information_ = info;
1415 } else if (loop_information_->Contains(*info->GetHeader())) {
1416 // Block is currently part of an outer loop. Make it part of this inner loop.
1417 // Note that a non-loop-header block having loop information means that loop information
1418 // has already been populated.
1419 loop_information_ = info;
1420 } else {
1421 // Block is part of an inner loop. Do not update the loop information.
1422 // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()`
1423 // at this point, because this method is being called while populating `info`.
1424 }
1425 }
1426
1427 // Raw update of the loop information.
SetLoopInformation(HLoopInformation * info)1428 void SetLoopInformation(HLoopInformation* info) {
1429 loop_information_ = info;
1430 }
1431
IsInLoop()1432 bool IsInLoop() const { return loop_information_ != nullptr; }
1433
GetTryCatchInformation()1434 TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; }
1435
SetTryCatchInformation(TryCatchInformation * try_catch_information)1436 void SetTryCatchInformation(TryCatchInformation* try_catch_information) {
1437 try_catch_information_ = try_catch_information;
1438 }
1439
IsTryBlock()1440 bool IsTryBlock() const {
1441 return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock();
1442 }
1443
IsCatchBlock()1444 bool IsCatchBlock() const {
1445 return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock();
1446 }
1447
1448 // Returns the try entry that this block's successors should have. They will
1449 // be in the same try, unless the block ends in a try boundary. In that case,
1450 // the appropriate try entry will be returned.
1451 const HTryBoundary* ComputeTryEntryOfSuccessors() const;
1452
1453 bool HasThrowingInstructions() const;
1454
1455 // Returns whether this block dominates the block passed as parameter.
1456 bool Dominates(const HBasicBlock* block) const;
1457
GetLifetimeStart()1458 size_t GetLifetimeStart() const { return lifetime_start_; }
GetLifetimeEnd()1459 size_t GetLifetimeEnd() const { return lifetime_end_; }
1460
SetLifetimeStart(size_t start)1461 void SetLifetimeStart(size_t start) { lifetime_start_ = start; }
SetLifetimeEnd(size_t end)1462 void SetLifetimeEnd(size_t end) { lifetime_end_ = end; }
1463
1464 bool EndsWithControlFlowInstruction() const;
1465 bool EndsWithReturn() const;
1466 bool EndsWithIf() const;
1467 bool EndsWithTryBoundary() const;
1468 bool HasSinglePhi() const;
1469
1470 private:
1471 HGraph* graph_;
1472 ArenaVector<HBasicBlock*> predecessors_;
1473 ArenaVector<HBasicBlock*> successors_;
1474 HInstructionList instructions_;
1475 HInstructionList phis_;
1476 HLoopInformation* loop_information_;
1477 HBasicBlock* dominator_;
1478 ArenaVector<HBasicBlock*> dominated_blocks_;
1479 uint32_t block_id_;
1480 // The dex program counter of the first instruction of this block.
1481 const uint32_t dex_pc_;
1482 size_t lifetime_start_;
1483 size_t lifetime_end_;
1484 TryCatchInformation* try_catch_information_;
1485
1486 friend class HGraph;
1487 friend class HInstruction;
1488 // Allow manual control of the ordering of predecessors/successors
1489 friend class OptimizingUnitTestHelper;
1490
1491 DISALLOW_COPY_AND_ASSIGN(HBasicBlock);
1492 };
1493
1494 // Iterates over the LoopInformation of all loops which contain 'block'
1495 // from the innermost to the outermost.
1496 class HLoopInformationOutwardIterator : public ValueObject {
1497 public:
HLoopInformationOutwardIterator(const HBasicBlock & block)1498 explicit HLoopInformationOutwardIterator(const HBasicBlock& block)
1499 : current_(block.GetLoopInformation()) {}
1500
Done()1501 bool Done() const { return current_ == nullptr; }
1502
Advance()1503 void Advance() {
1504 DCHECK(!Done());
1505 current_ = current_->GetPreHeader()->GetLoopInformation();
1506 }
1507
Current()1508 HLoopInformation* Current() const {
1509 DCHECK(!Done());
1510 return current_;
1511 }
1512
1513 private:
1514 HLoopInformation* current_;
1515
1516 DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator);
1517 };
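// Illustrative sketch (`block` is an assumed HBasicBlock* from the surrounding code): visiting
// every loop that contains `block`, innermost first.
//
//   for (HLoopInformationOutwardIterator it(*block); !it.Done(); it.Advance()) {
//     HLoopInformation* loop = it.Current();
//     // ... e.g. inspect loop->GetHeader() ...
//   }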
1518
1519 #define FOR_EACH_CONCRETE_INSTRUCTION_SCALAR_COMMON(M) \
1520 M(Above, Condition) \
1521 M(AboveOrEqual, Condition) \
1522 M(Abs, UnaryOperation) \
1523 M(Add, BinaryOperation) \
1524 M(And, BinaryOperation) \
1525 M(ArrayGet, Instruction) \
1526 M(ArrayLength, Instruction) \
1527 M(ArraySet, Instruction) \
1528 M(Below, Condition) \
1529 M(BelowOrEqual, Condition) \
1530 M(BitwiseNegatedRight, BinaryOperation) \
1531 M(BooleanNot, UnaryOperation) \
1532 M(BoundsCheck, Instruction) \
1533 M(BoundType, Instruction) \
1534 M(CheckCast, Instruction) \
1535 M(ClassTableGet, Instruction) \
1536 M(ClearException, Instruction) \
1537 M(ClinitCheck, Instruction) \
1538 M(Compare, BinaryOperation) \
1539 M(ConstructorFence, Instruction) \
1540 M(CurrentMethod, Instruction) \
1541 M(ShouldDeoptimizeFlag, Instruction) \
1542 M(Deoptimize, Instruction) \
1543 M(Div, BinaryOperation) \
1544 M(DivZeroCheck, Instruction) \
1545 M(DoubleConstant, Constant) \
1546 M(Equal, Condition) \
1547 M(Exit, Instruction) \
1548 M(FloatConstant, Constant) \
1549 M(Goto, Instruction) \
1550 M(GreaterThan, Condition) \
1551 M(GreaterThanOrEqual, Condition) \
1552 M(If, Instruction) \
1553 M(InstanceFieldGet, Instruction) \
1554 M(InstanceFieldSet, Instruction) \
1555 M(InstanceOf, Instruction) \
1556 M(IntConstant, Constant) \
1557 M(IntermediateAddress, Instruction) \
1558 M(InvokeUnresolved, Invoke) \
1559 M(InvokeInterface, Invoke) \
1560 M(InvokeStaticOrDirect, Invoke) \
1561 M(InvokeVirtual, Invoke) \
1562 M(InvokePolymorphic, Invoke) \
1563 M(InvokeCustom, Invoke) \
1564 M(LessThan, Condition) \
1565 M(LessThanOrEqual, Condition) \
1566 M(LoadClass, Instruction) \
1567 M(LoadException, Instruction) \
1568 M(LoadMethodHandle, Instruction) \
1569 M(LoadMethodType, Instruction) \
1570 M(LoadString, Instruction) \
1571 M(LongConstant, Constant) \
1572 M(Max, Instruction) \
1573 M(MemoryBarrier, Instruction) \
1574 M(MethodEntryHook, Instruction) \
1575 M(MethodExitHook, Instruction) \
1576 M(Min, BinaryOperation) \
1577 M(MonitorOperation, Instruction) \
1578 M(Mul, BinaryOperation) \
1579 M(Neg, UnaryOperation) \
1580 M(NewArray, Instruction) \
1581 M(NewInstance, Instruction) \
1582 M(Nop, Instruction) \
1583 M(Not, UnaryOperation) \
1584 M(NotEqual, Condition) \
1585 M(NullConstant, Instruction) \
1586 M(NullCheck, Instruction) \
1587 M(Or, BinaryOperation) \
1588 M(PackedSwitch, Instruction) \
1589 M(ParallelMove, Instruction) \
1590 M(ParameterValue, Instruction) \
1591 M(Phi, Instruction) \
1592 M(Rem, BinaryOperation) \
1593 M(Return, Instruction) \
1594 M(ReturnVoid, Instruction) \
1595 M(Ror, BinaryOperation) \
1596 M(Shl, BinaryOperation) \
1597 M(Shr, BinaryOperation) \
1598 M(StaticFieldGet, Instruction) \
1599 M(StaticFieldSet, Instruction) \
1600 M(StringBuilderAppend, Instruction) \
1601 M(UnresolvedInstanceFieldGet, Instruction) \
1602 M(UnresolvedInstanceFieldSet, Instruction) \
1603 M(UnresolvedStaticFieldGet, Instruction) \
1604 M(UnresolvedStaticFieldSet, Instruction) \
1605 M(Select, Instruction) \
1606 M(Sub, BinaryOperation) \
1607 M(SuspendCheck, Instruction) \
1608 M(Throw, Instruction) \
1609 M(TryBoundary, Instruction) \
1610 M(TypeConversion, Instruction) \
1611 M(UShr, BinaryOperation) \
1612 M(Xor, BinaryOperation)
1613
1614 #define FOR_EACH_CONCRETE_INSTRUCTION_VECTOR_COMMON(M) \
1615 M(VecReplicateScalar, VecUnaryOperation) \
1616 M(VecExtractScalar, VecUnaryOperation) \
1617 M(VecReduce, VecUnaryOperation) \
1618 M(VecCnv, VecUnaryOperation) \
1619 M(VecNeg, VecUnaryOperation) \
1620 M(VecAbs, VecUnaryOperation) \
1621 M(VecNot, VecUnaryOperation) \
1622 M(VecAdd, VecBinaryOperation) \
1623 M(VecHalvingAdd, VecBinaryOperation) \
1624 M(VecSub, VecBinaryOperation) \
1625 M(VecMul, VecBinaryOperation) \
1626 M(VecDiv, VecBinaryOperation) \
1627 M(VecMin, VecBinaryOperation) \
1628 M(VecMax, VecBinaryOperation) \
1629 M(VecAnd, VecBinaryOperation) \
1630 M(VecAndNot, VecBinaryOperation) \
1631 M(VecOr, VecBinaryOperation) \
1632 M(VecXor, VecBinaryOperation) \
1633 M(VecSaturationAdd, VecBinaryOperation) \
1634 M(VecSaturationSub, VecBinaryOperation) \
1635 M(VecShl, VecBinaryOperation) \
1636 M(VecShr, VecBinaryOperation) \
1637 M(VecUShr, VecBinaryOperation) \
1638 M(VecSetScalars, VecOperation) \
1639 M(VecMultiplyAccumulate, VecOperation) \
1640 M(VecSADAccumulate, VecOperation) \
1641 M(VecDotProd, VecOperation) \
1642 M(VecLoad, VecMemoryOperation) \
1643 M(VecStore, VecMemoryOperation) \
1644 M(VecPredSetAll, VecPredSetOperation) \
1645 M(VecPredWhile, VecPredSetOperation) \
1646 M(VecPredToBoolean, VecOperation) \
1647 M(VecCondition, VecPredSetOperation) \
1648 M(VecPredNot, VecPredSetOperation) \
1649
1650 #define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M) \
1651 FOR_EACH_CONCRETE_INSTRUCTION_SCALAR_COMMON(M) \
1652 FOR_EACH_CONCRETE_INSTRUCTION_VECTOR_COMMON(M)
1653
1654 /*
1655 * Instructions, shared across several (not all) architectures.
1656 */
1657 #if !defined(ART_ENABLE_CODEGEN_arm) && !defined(ART_ENABLE_CODEGEN_arm64)
1658 #define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)
1659 #else
1660 #define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M) \
1661 M(DataProcWithShifterOp, Instruction) \
1662 M(MultiplyAccumulate, Instruction) \
1663 M(IntermediateAddressIndex, Instruction)
1664 #endif
1665
1666 #define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)
1667
1668 #define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)
1669
1670 #if defined(ART_ENABLE_CODEGEN_riscv64)
1671 #define FOR_EACH_CONCRETE_INSTRUCTION_RISCV64(M) M(Riscv64ShiftAdd, Instruction)
1672 #else
1673 #define FOR_EACH_CONCRETE_INSTRUCTION_RISCV64(M)
1674 #endif
1675
1676 #ifndef ART_ENABLE_CODEGEN_x86
1677 #define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)
1678 #else
1679 #define FOR_EACH_CONCRETE_INSTRUCTION_X86(M) \
1680 M(X86ComputeBaseMethodAddress, Instruction) \
1681 M(X86LoadFromConstantTable, Instruction) \
1682 M(X86FPNeg, Instruction) \
1683 M(X86PackedSwitch, Instruction)
1684 #endif
1685
1686 #if defined(ART_ENABLE_CODEGEN_x86) || defined(ART_ENABLE_CODEGEN_x86_64)
1687 #define FOR_EACH_CONCRETE_INSTRUCTION_X86_COMMON(M) \
1688 M(X86AndNot, Instruction) \
1689 M(X86MaskOrResetLeastSetBit, Instruction)
1690 #else
1691 #define FOR_EACH_CONCRETE_INSTRUCTION_X86_COMMON(M)
1692 #endif
1693
1694 #define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)
1695
1696 #define FOR_EACH_CONCRETE_INSTRUCTION(M) \
1697 FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M) \
1698 FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M) \
1699 FOR_EACH_CONCRETE_INSTRUCTION_ARM(M) \
1700 FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M) \
1701 FOR_EACH_CONCRETE_INSTRUCTION_RISCV64(M) \
1702 FOR_EACH_CONCRETE_INSTRUCTION_X86(M) \
1703 FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M) \
1704 FOR_EACH_CONCRETE_INSTRUCTION_X86_COMMON(M)
1705
1706 #define FOR_EACH_ABSTRACT_INSTRUCTION(M) \
1707 M(Condition, BinaryOperation) \
1708 M(Constant, Instruction) \
1709 M(UnaryOperation, Instruction) \
1710 M(BinaryOperation, Instruction) \
1711 M(Invoke, Instruction) \
1712 M(VecOperation, Instruction) \
1713 M(VecUnaryOperation, VecOperation) \
1714 M(VecBinaryOperation, VecOperation) \
1715 M(VecMemoryOperation, VecOperation) \
1716 M(VecPredSetOperation, VecOperation)
1717
1718 #define FOR_EACH_INSTRUCTION(M) \
1719 FOR_EACH_CONCRETE_INSTRUCTION(M) \
1720 FOR_EACH_ABSTRACT_INSTRUCTION(M)
1721
1722 #define FORWARD_DECLARATION(type, super) class H##type;
FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)1723 FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
1724 #undef FORWARD_DECLARATION
1725
1726 #define DECLARE_INSTRUCTION(type) \
1727 private: \
1728 H##type& operator=(const H##type&) = delete; \
1729 public: \
1730 const char* DebugName() const override { return #type; } \
1731 HInstruction* Clone(ArenaAllocator* arena) const override { \
1732 DCHECK(IsClonable()); \
1733 return new (arena) H##type(*this); \
1734 } \
1735 void Accept(HGraphVisitor* visitor) override
1736
1737 #define DECLARE_ABSTRACT_INSTRUCTION(type) \
1738 private: \
1739 H##type& operator=(const H##type&) = delete; \
1740 public:
1741
1742 #define DEFAULT_COPY_CONSTRUCTOR(type) H##type(const H##type& other) = default;
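// Illustrative sketch of how these macros are typically combined in a concrete instruction
// class later in this file (HMyOp is a made-up name, not a real instruction):
//
//   class HMyOp final : public HInstruction {
//    public:
//     ...
//     bool IsClonable() const override { return true; }
//     DECLARE_INSTRUCTION(MyOp);
//    protected:
//     DEFAULT_COPY_CONSTRUCTOR(MyOp);
//   };
//
// DECLARE_INSTRUCTION(MyOp) supplies DebugName(), Clone() and the Accept() declaration, and
// DEFAULT_COPY_CONSTRUCTOR(MyOp) provides the default copy constructor that Clone() relies on.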
1743
1744 template <typename T>
1745 class HUseListNode : public ArenaObject<kArenaAllocUseListNode>,
1746 public IntrusiveForwardListNode<HUseListNode<T>> {
1747 public:
1748 // Get the instruction which has this use as one of the inputs.
1749 T GetUser() const { return user_; }
1750 // Get the position of the input record that this use corresponds to.
1751 size_t GetIndex() const { return index_; }
1752 // Set the position of the input record that this use corresponds to.
1753 void SetIndex(size_t index) { index_ = index; }
1754
1755 private:
1756 HUseListNode(T user, size_t index)
1757 : user_(user), index_(index) {}
1758
1759 T const user_;
1760 size_t index_;
1761
1762 friend class HInstruction;
1763
1764 DISALLOW_COPY_AND_ASSIGN(HUseListNode);
1765 };
1766
1767 template <typename T>
1768 using HUseList = IntrusiveForwardList<HUseListNode<T>>;
1769
1770 // This class is used by HEnvironment and HInstruction classes to record the
1771 // instructions they use and pointers to the corresponding HUseListNodes kept
1772 // by the used instructions.
1773 template <typename T>
1774 class HUserRecord : public ValueObject {
1775 public:
HUserRecord()1776 HUserRecord() : instruction_(nullptr), before_use_node_() {}
HUserRecord(HInstruction * instruction)1777 explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), before_use_node_() {}
1778
HUserRecord(const HUserRecord<T> & old_record,typename HUseList<T>::iterator before_use_node)1779 HUserRecord(const HUserRecord<T>& old_record, typename HUseList<T>::iterator before_use_node)
1780 : HUserRecord(old_record.instruction_, before_use_node) {}
HUserRecord(HInstruction * instruction,typename HUseList<T>::iterator before_use_node)1781 HUserRecord(HInstruction* instruction, typename HUseList<T>::iterator before_use_node)
1782 : instruction_(instruction), before_use_node_(before_use_node) {
1783 DCHECK(instruction_ != nullptr);
1784 }
1785
GetInstruction()1786 HInstruction* GetInstruction() const { return instruction_; }
GetBeforeUseNode()1787 typename HUseList<T>::iterator GetBeforeUseNode() const { return before_use_node_; }
GetUseNode()1788 typename HUseList<T>::iterator GetUseNode() const { return ++GetBeforeUseNode(); }
1789
1790 private:
1791 // Instruction used by the user.
1792 HInstruction* instruction_;
1793
1794 // Iterator before the corresponding entry in the use list kept by 'instruction_'.
1795 typename HUseList<T>::iterator before_use_node_;
1796 };
1797
1798 // Helper class that extracts the input instruction from HUserRecord<HInstruction*>.
1799 // This is used for HInstruction::GetInputs() to return a container wrapper providing
1800 // HInstruction* values even though the underlying container has HUserRecord<>s.
1801 struct HInputExtractor {
operatorHInputExtractor1802 HInstruction* operator()(HUserRecord<HInstruction*>& record) const {
1803 return record.GetInstruction();
1804 }
operatorHInputExtractor1805 const HInstruction* operator()(const HUserRecord<HInstruction*>& record) const {
1806 return record.GetInstruction();
1807 }
1808 };
1809
1810 using HInputsRef = TransformArrayRef<HUserRecord<HInstruction*>, HInputExtractor>;
1811 using HConstInputsRef = TransformArrayRef<const HUserRecord<HInstruction*>, HInputExtractor>;
1812
1813 /**
1814 * Side-effects representation.
1815 *
1816 * For write/read dependences on fields/arrays, the dependence analysis uses
1817 * type disambiguation (e.g. a float field write cannot modify the value of an
1818 * integer field read) and the access type (e.g. a reference array write cannot
1819 * modify the value of a reference field read [although it may modify the
1820 * reference fetch prior to reading the field, which is represented by its own
1821 * write/read dependence]). The analysis makes conservative points-to
1822 * assumptions on reference types (e.g. two same typed arrays are assumed to be
1823 * the same, and any reference read depends on any reference read without
1824 * further regard of its type).
1825 *
1826 * kDependsOnGCBit is defined in the following way: instructions with kDependsOnGCBit must not be
1827 * alive across the point where garbage collection might happen.
1828 *
1829 * Note: Instructions with kCanTriggerGCBit do not depend on each other.
1830 *
1831 * kCanTriggerGCBit must be used for instructions for which GC might happen on the path across
1832 * those instructions from the compiler perspective (between this instruction and the next one
1833 * in the IR).
1834 *
1835 * Note: Instructions which can cause GC only on a fatal slow path do not need
1836 * kCanTriggerGCBit as the execution never returns to the instruction next to the exceptional
1837 * one. However the execution may return to compiled code if there is a catch block in the
1838 * current method; for this purpose the TryBoundary exit instruction has kCanTriggerGCBit
1839 * set.
1840 *
1841 * The internal representation uses 38 bits and is described in the table below.
1842 * The first line indicates the side effect, and for field/array accesses the
1843 * second line indicates the type of the access (in the order of the
1844 * DataType::Type enum).
1845 * The two numbered lines below indicate the bit position in the bitfield (read
1846 * vertically).
1847 *
1848 * |Depends on GC|ARRAY-R |FIELD-R |Can trigger GC|ARRAY-W |FIELD-W |
1849 * +-------------+---------+---------+--------------+---------+---------+
1850 * | |DFJISCBZL|DFJISCBZL| |DFJISCBZL|DFJISCBZL|
1851 * | 3 |333333322|222222221| 1 |111111110|000000000|
1852 * | 7 |654321098|765432109| 8 |765432109|876543210|
1853 *
1854 * Note that, to ease the implementation, 'changes' bits are least significant
1855 * bits, while 'dependency' bits are most significant bits.
1856 */
1857 class SideEffects : public ValueObject {
1858 public:
SideEffects()1859 SideEffects() : flags_(0) {}
1860
None()1861 static SideEffects None() {
1862 return SideEffects(0);
1863 }
1864
All()1865 static SideEffects All() {
1866 return SideEffects(kAllChangeBits | kAllDependOnBits);
1867 }
1868
AllChanges()1869 static SideEffects AllChanges() {
1870 return SideEffects(kAllChangeBits);
1871 }
1872
AllDependencies()1873 static SideEffects AllDependencies() {
1874 return SideEffects(kAllDependOnBits);
1875 }
1876
AllExceptGCDependency()1877 static SideEffects AllExceptGCDependency() {
1878 return AllWritesAndReads().Union(SideEffects::CanTriggerGC());
1879 }
1880
AllWritesAndReads()1881 static SideEffects AllWritesAndReads() {
1882 return SideEffects(kAllWrites | kAllReads);
1883 }
1884
AllWrites()1885 static SideEffects AllWrites() {
1886 return SideEffects(kAllWrites);
1887 }
1888
AllReads()1889 static SideEffects AllReads() {
1890 return SideEffects(kAllReads);
1891 }
1892
FieldWriteOfType(DataType::Type type,bool is_volatile)1893 static SideEffects FieldWriteOfType(DataType::Type type, bool is_volatile) {
1894 return is_volatile
1895 ? AllWritesAndReads()
1896 : SideEffects(TypeFlag(type, kFieldWriteOffset));
1897 }
1898
ArrayWriteOfType(DataType::Type type)1899 static SideEffects ArrayWriteOfType(DataType::Type type) {
1900 return SideEffects(TypeFlag(type, kArrayWriteOffset));
1901 }
1902
FieldReadOfType(DataType::Type type,bool is_volatile)1903 static SideEffects FieldReadOfType(DataType::Type type, bool is_volatile) {
1904 return is_volatile
1905 ? AllWritesAndReads()
1906 : SideEffects(TypeFlag(type, kFieldReadOffset));
1907 }
1908
ArrayReadOfType(DataType::Type type)1909 static SideEffects ArrayReadOfType(DataType::Type type) {
1910 return SideEffects(TypeFlag(type, kArrayReadOffset));
1911 }
1912
1913 // Returns a side effect marking that GC might happen across this instruction from the
1914 // compiler's perspective, so the next instruction in the IR would observe it.
1915 //
1916 // See the SideEffect class comments.
CanTriggerGC()1917 static SideEffects CanTriggerGC() {
1918 return SideEffects(1ULL << kCanTriggerGCBit);
1919 }
1920
1921 // Returns a side effect marking that the instruction must not be alive across a GC point.
1922 //
1923 // See the SideEffect class comments.
DependsOnGC()1924 static SideEffects DependsOnGC() {
1925 return SideEffects(1ULL << kDependsOnGCBit);
1926 }
1927
1928 // Combines the side-effects of this and the other.
Union(SideEffects other)1929 SideEffects Union(SideEffects other) const {
1930 return SideEffects(flags_ | other.flags_);
1931 }
1932
Exclusion(SideEffects other)1933 SideEffects Exclusion(SideEffects other) const {
1934 return SideEffects(flags_ & ~other.flags_);
1935 }
1936
Add(SideEffects other)1937 void Add(SideEffects other) {
1938 flags_ |= other.flags_;
1939 }
1940
Includes(SideEffects other)1941 bool Includes(SideEffects other) const {
1942 return (other.flags_ & flags_) == other.flags_;
1943 }
1944
HasSideEffects()1945 bool HasSideEffects() const {
1946 return (flags_ & kAllChangeBits);
1947 }
1948
HasDependencies()1949 bool HasDependencies() const {
1950 return (flags_ & kAllDependOnBits);
1951 }
1952
1953 // Returns true if there are no side effects or dependencies.
DoesNothing()1954 bool DoesNothing() const {
1955 return flags_ == 0;
1956 }
1957
1958 // Returns true if something is written.
DoesAnyWrite()1959 bool DoesAnyWrite() const {
1960 return (flags_ & kAllWrites);
1961 }
1962
1963 // Returns true if something is read.
DoesAnyRead()1964 bool DoesAnyRead() const {
1965 return (flags_ & kAllReads);
1966 }
1967
1968 // Returns true if potentially everything is written and read
1969 // (every type and every kind of access).
DoesAllReadWrite()1970 bool DoesAllReadWrite() const {
1971 return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads);
1972 }
1973
DoesAll()1974 bool DoesAll() const {
1975 return flags_ == (kAllChangeBits | kAllDependOnBits);
1976 }
1977
1978 // Returns true if `this` may read something written by `other`.
MayDependOn(SideEffects other)1979 bool MayDependOn(SideEffects other) const {
1980 const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
1981 return (other.flags_ & depends_on_flags);
1982 }
1983
1984 // Returns string representation of flags (for debugging only).
1985 // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL|
ToString()1986 std::string ToString() const {
1987 std::string flags = "|";
1988 for (int s = kLastBit; s >= 0; s--) {
1989 bool current_bit_is_set = ((flags_ >> s) & 1) != 0;
1990 if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) {
1991 // This is a bit for the GC side effect.
1992 if (current_bit_is_set) {
1993 flags += "GC";
1994 }
1995 flags += "|";
1996 } else {
1997 // This is a bit for the array/field analysis.
1998 // The underscore character stands for the 'can trigger GC' bit.
1999 static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD";
2000 if (current_bit_is_set) {
2001 flags += kDebug[s];
2002 }
2003 if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) ||
2004 (s == kFieldReadOffset) || (s == kArrayReadOffset)) {
2005 flags += "|";
2006 }
2007 }
2008 }
2009 return flags;
2010 }
2011
Equals(const SideEffects & other)2012 bool Equals(const SideEffects& other) const { return flags_ == other.flags_; }
2013
2014 private:
2015 static constexpr int kFieldArrayAnalysisBits = 9;
2016
2017 static constexpr int kFieldWriteOffset = 0;
2018 static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits;
2019 static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1;
2020 static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1;
2021
2022 static constexpr int kChangeBits = kCanTriggerGCBit + 1;
2023
2024 static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1;
2025 static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits;
2026 static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1;
2027 static constexpr int kDependsOnGCBit = kLastBitForReads + 1;
2028
2029 static constexpr int kLastBit = kDependsOnGCBit;
2030 static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits;
2031
2032 // Aliases.
2033
2034 static_assert(kChangeBits == kDependOnBits,
2035 "the 'change' bits should match the 'depend on' bits.");
2036
2037 static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1);
2038 static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits;
2039 static constexpr uint64_t kAllWrites =
2040 ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset;
2041 static constexpr uint64_t kAllReads =
2042 ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset;
2043
2044 // Translates type to bit flag. The type must correspond to a Java type.
TypeFlag(DataType::Type type,int offset)2045 static uint64_t TypeFlag(DataType::Type type, int offset) {
2046 int shift;
2047 switch (type) {
2048 case DataType::Type::kReference: shift = 0; break;
2049 case DataType::Type::kBool: shift = 1; break;
2050 case DataType::Type::kInt8: shift = 2; break;
2051 case DataType::Type::kUint16: shift = 3; break;
2052 case DataType::Type::kInt16: shift = 4; break;
2053 case DataType::Type::kInt32: shift = 5; break;
2054 case DataType::Type::kInt64: shift = 6; break;
2055 case DataType::Type::kFloat32: shift = 7; break;
2056 case DataType::Type::kFloat64: shift = 8; break;
2057 default:
2058 LOG(FATAL) << "Unexpected data type " << type;
2059 UNREACHABLE();
2060 }
2061 DCHECK_LE(kFieldWriteOffset, shift);
2062 DCHECK_LT(shift, kArrayWriteOffset);
2063 return UINT64_C(1) << (shift + offset);
2064 }
2065
2066 // Private constructor on direct flags value.
SideEffects(uint64_t flags)2067 explicit SideEffects(uint64_t flags) : flags_(flags) {}
2068
2069 uint64_t flags_;
2070 };
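// Illustrative sketch (not from this file): building and querying side effects the way a
// dependency check in an optimization might.
//
//   SideEffects write = SideEffects::FieldWriteOfType(DataType::Type::kInt32,
//                                                     /* is_volatile= */ false);
//   SideEffects read = SideEffects::FieldReadOfType(DataType::Type::kInt32,
//                                                   /* is_volatile= */ false);
//   SideEffects combined = write.Union(SideEffects::CanTriggerGC());
//   bool ordering_matters = read.MayDependOn(combined);  // true: the read may see the write.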
2071
2072 // A HEnvironment object contains the values of virtual registers at a given location.
2073 class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
2074 public:
HEnvironment(ArenaAllocator * allocator,size_t number_of_vregs,ArtMethod * method,uint32_t dex_pc,HInstruction * holder)2075 ALWAYS_INLINE HEnvironment(ArenaAllocator* allocator,
2076 size_t number_of_vregs,
2077 ArtMethod* method,
2078 uint32_t dex_pc,
2079 HInstruction* holder)
2080 : vregs_(number_of_vregs, allocator->Adapter(kArenaAllocEnvironmentVRegs)),
2081 locations_(allocator->Adapter(kArenaAllocEnvironmentLocations)),
2082 parent_(nullptr),
2083 method_(method),
2084 dex_pc_(dex_pc),
2085 holder_(holder) {
2086 }
2087
HEnvironment(ArenaAllocator * allocator,const HEnvironment & to_copy,HInstruction * holder)2088 ALWAYS_INLINE HEnvironment(ArenaAllocator* allocator,
2089 const HEnvironment& to_copy,
2090 HInstruction* holder)
2091 : HEnvironment(allocator,
2092 to_copy.Size(),
2093 to_copy.GetMethod(),
2094 to_copy.GetDexPc(),
2095 holder) {}
2096
AllocateLocations()2097 void AllocateLocations() {
2098 DCHECK(locations_.empty());
2099 locations_.resize(vregs_.size());
2100 }
2101
SetAndCopyParentChain(ArenaAllocator * allocator,HEnvironment * parent)2102 void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) {
2103 if (parent_ != nullptr) {
2104 parent_->SetAndCopyParentChain(allocator, parent);
2105 } else {
2106 parent_ = new (allocator) HEnvironment(allocator, *parent, holder_);
2107 parent_->CopyFrom(parent);
2108 if (parent->GetParent() != nullptr) {
2109 parent_->SetAndCopyParentChain(allocator, parent->GetParent());
2110 }
2111 }
2112 }
2113
2114 void CopyFrom(ArrayRef<HInstruction* const> locals);
2115 void CopyFrom(HEnvironment* environment);
2116
2117 // Copy from `env`. If it's a loop phi for `loop_header`, copy the first
2118 // input to the loop phi instead. This is for inserting instructions that
2119 // require an environment (like HDeoptimization) in the loop pre-header.
2120 void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header);
2121
SetRawEnvAt(size_t index,HInstruction * instruction)2122 void SetRawEnvAt(size_t index, HInstruction* instruction) {
2123 vregs_[index] = HUserRecord<HEnvironment*>(instruction);
2124 }
2125
GetInstructionAt(size_t index)2126 HInstruction* GetInstructionAt(size_t index) const {
2127 return vregs_[index].GetInstruction();
2128 }
2129
2130 void RemoveAsUserOfInput(size_t index) const;
2131
2132 // Replaces the input at the position 'index' with the replacement; the replacement and old
2133 // input instructions' env_uses_ lists are adjusted. The function works similarly to
2134 // HInstruction::ReplaceInput.
2135 void ReplaceInput(HInstruction* replacement, size_t index);
2136
Size()2137 size_t Size() const { return vregs_.size(); }
2138
GetParent()2139 HEnvironment* GetParent() const { return parent_; }
2140
SetLocationAt(size_t index,Location location)2141 void SetLocationAt(size_t index, Location location) {
2142 locations_[index] = location;
2143 }
2144
GetLocationAt(size_t index)2145 Location GetLocationAt(size_t index) const {
2146 return locations_[index];
2147 }
2148
GetDexPc()2149 uint32_t GetDexPc() const {
2150 return dex_pc_;
2151 }
2152
GetMethod()2153 ArtMethod* GetMethod() const {
2154 return method_;
2155 }
2156
GetHolder()2157 HInstruction* GetHolder() const {
2158 return holder_;
2159 }
2160
2161
IsFromInlinedInvoke()2162 bool IsFromInlinedInvoke() const {
2163 return GetParent() != nullptr;
2164 }
2165
2166 class EnvInputSelector {
2167 public:
EnvInputSelector(const HEnvironment * e)2168 explicit EnvInputSelector(const HEnvironment* e) : env_(e) {}
operator()2169 HInstruction* operator()(size_t s) const {
2170 return env_->GetInstructionAt(s);
2171 }
2172 private:
2173 const HEnvironment* env_;
2174 };
2175
2176 using HConstEnvInputRef = TransformIterator<CountIter, EnvInputSelector>;
GetEnvInputs()2177 IterationRange<HConstEnvInputRef> GetEnvInputs() const {
2178 IterationRange<CountIter> range(Range(Size()));
2179 return MakeIterationRange(MakeTransformIterator(range.begin(), EnvInputSelector(this)),
2180 MakeTransformIterator(range.end(), EnvInputSelector(this)));
2181 }
2182
2183 private:
2184 ArenaVector<HUserRecord<HEnvironment*>> vregs_;
2185 ArenaVector<Location> locations_;
2186 HEnvironment* parent_;
2187 ArtMethod* method_;
2188 const uint32_t dex_pc_;
2189
2190 // The instruction that holds this environment.
2191 HInstruction* const holder_;
2192
2193 friend class HInstruction;
2194
2195 DISALLOW_COPY_AND_ASSIGN(HEnvironment);
2196 };
2197
2198 std::ostream& operator<<(std::ostream& os, const HInstruction& rhs);
2199
2200 // Iterates over an environment and its chain of parent environments.
2201 class HEnvironmentIterator : public ValueObject {
2202 public:
2203 using iterator_category = std::forward_iterator_tag;
2204 using value_type = HEnvironment*;
2205 using difference_type = ptrdiff_t;
2206 using pointer = void;
2207 using reference = void;
2208
HEnvironmentIterator(HEnvironment * cur)2209 explicit HEnvironmentIterator(HEnvironment* cur) : cur_(cur) {}
2210
2211 HEnvironment* operator*() const {
2212 return cur_;
2213 }
2214
2215 HEnvironmentIterator& operator++() {
2216 DCHECK(cur_ != nullptr);
2217 cur_ = cur_->GetParent();
2218 return *this;
2219 }
2220
2221 HEnvironmentIterator operator++(int) {
2222 HEnvironmentIterator prev(*this);
2223 ++(*this);
2224 return prev;
2225 }
2226
2227 bool operator==(const HEnvironmentIterator& other) const {
2228 return other.cur_ == cur_;
2229 }
2230
2231 bool operator!=(const HEnvironmentIterator& other) const {
2232 return !(*this == other);
2233 }
2234
2235 private:
2236 HEnvironment* cur_;
2237 };
2238
2239 class HInstruction : public ArenaObject<kArenaAllocInstruction> {
2240 public:
2241 #define DECLARE_KIND(type, super) k##type,
2242 enum InstructionKind { // private marker to avoid generate-operator-out.py from processing.
2243 FOR_EACH_CONCRETE_INSTRUCTION(DECLARE_KIND)
2244 kLastInstructionKind
2245 };
2246 #undef DECLARE_KIND
2247
HInstruction(InstructionKind kind,SideEffects side_effects,uint32_t dex_pc)2248 HInstruction(InstructionKind kind, SideEffects side_effects, uint32_t dex_pc)
2249 : HInstruction(kind, DataType::Type::kVoid, side_effects, dex_pc) {}
2250
HInstruction(InstructionKind kind,DataType::Type type,SideEffects side_effects,uint32_t dex_pc)2251 HInstruction(InstructionKind kind, DataType::Type type, SideEffects side_effects, uint32_t dex_pc)
2252 : previous_(nullptr),
2253 next_(nullptr),
2254 block_(nullptr),
2255 dex_pc_(dex_pc),
2256 id_(-1),
2257 ssa_index_(-1),
2258 packed_fields_(0u),
2259 environment_(nullptr),
2260 locations_(nullptr),
2261 live_interval_(nullptr),
2262 lifetime_position_(kNoLifetime),
2263 side_effects_(side_effects),
2264 reference_type_handle_(ReferenceTypeInfo::CreateInvalid().GetTypeHandle()) {
2265 SetPackedField<InstructionKindField>(kind);
2266 SetPackedField<TypeField>(type);
2267 SetPackedFlag<kFlagReferenceTypeIsExact>(ReferenceTypeInfo::CreateInvalid().IsExact());
2268 }
2269
~HInstruction()2270 virtual ~HInstruction() {}
2271
2272 std::ostream& Dump(std::ostream& os, bool dump_args = false);
2273
2274 // Helper for dumping without argument information using operator<<
2275 struct NoArgsDump {
2276 const HInstruction* ins;
2277 };
DumpWithoutArgs()2278 NoArgsDump DumpWithoutArgs() const {
2279 return NoArgsDump{this};
2280 }
2281 // Helper for dumping with argument information using operator<<
2282 struct ArgsDump {
2283 const HInstruction* ins;
2284 };
DumpWithArgs()2285 ArgsDump DumpWithArgs() const {
2286 return ArgsDump{this};
2287 }
2288
GetNext()2289 HInstruction* GetNext() const { return next_; }
GetPrevious()2290 HInstruction* GetPrevious() const { return previous_; }
2291
2292 HInstruction* GetNextDisregardingMoves() const;
2293 HInstruction* GetPreviousDisregardingMoves() const;
2294
GetBlock()2295 HBasicBlock* GetBlock() const { return block_; }
GetAllocator()2296 ArenaAllocator* GetAllocator() const { return block_->GetGraph()->GetAllocator(); }
SetBlock(HBasicBlock * block)2297 void SetBlock(HBasicBlock* block) { block_ = block; }
IsInBlock()2298 bool IsInBlock() const { return block_ != nullptr; }
IsInLoop()2299 bool IsInLoop() const { return block_->IsInLoop(); }
IsLoopHeaderPhi()2300 bool IsLoopHeaderPhi() const { return IsPhi() && block_->IsLoopHeader(); }
IsIrreducibleLoopHeaderPhi()2301 bool IsIrreducibleLoopHeaderPhi() const {
2302 return IsLoopHeaderPhi() && GetBlock()->GetLoopInformation()->IsIrreducible();
2303 }
2304
2305 virtual ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() = 0;
2306
GetInputRecords()2307 ArrayRef<const HUserRecord<HInstruction*>> GetInputRecords() const {
2308 // One virtual method is enough, just const_cast<> and then re-add the const.
2309 return ArrayRef<const HUserRecord<HInstruction*>>(
2310 const_cast<HInstruction*>(this)->GetInputRecords());
2311 }
2312
GetInputs()2313 HInputsRef GetInputs() {
2314 return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
2315 }
2316
GetInputs()2317 HConstInputsRef GetInputs() const {
2318 return MakeTransformArrayRef(GetInputRecords(), HInputExtractor());
2319 }
2320
InputCount()2321 size_t InputCount() const { return GetInputRecords().size(); }
InputAt(size_t i)2322 HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); }
2323
HasInput(HInstruction * input)2324 bool HasInput(HInstruction* input) const {
2325 for (const HInstruction* i : GetInputs()) {
2326 if (i == input) {
2327 return true;
2328 }
2329 }
2330 return false;
2331 }
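  // Illustrative sketch: iterating the data inputs of an instruction through GetInputs()
  // (`instruction` is assumed given; the loop body is a placeholder).
  //
  //   for (HInstruction* input : instruction->GetInputs()) {
  //     // ... e.g. inspect input->GetType() ...
  //   }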
2332
SetRawInputAt(size_t index,HInstruction * input)2333 void SetRawInputAt(size_t index, HInstruction* input) {
2334 SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input));
2335 }
2336
2337 virtual void Accept(HGraphVisitor* visitor) = 0;
2338 virtual const char* DebugName() const = 0;
2339
GetType()2340 DataType::Type GetType() const {
2341 return TypeField::Decode(GetPackedFields());
2342 }
2343
NeedsEnvironment()2344 virtual bool NeedsEnvironment() const { return false; }
NeedsBss()2345 virtual bool NeedsBss() const {
2346 return false;
2347 }
2348
GetDexPc()2349 uint32_t GetDexPc() const { return dex_pc_; }
2350
IsControlFlow()2351 virtual bool IsControlFlow() const { return false; }
2352
2353 // Can the instruction throw?
2354 // TODO: We should rename to CanVisiblyThrow, as some instructions (like HNewInstance),
2355 // could throw OOME, but it is still OK to remove them if they are unused.
CanThrow()2356 virtual bool CanThrow() const { return false; }
2357
2358 // Does the instruction always throw an exception unconditionally?
AlwaysThrows()2359 virtual bool AlwaysThrows() const { return false; }
2360 // Will this instruction only cause async exceptions if it causes any at all?
OnlyThrowsAsyncExceptions()2361 virtual bool OnlyThrowsAsyncExceptions() const {
2362 return false;
2363 }
2364
CanThrowIntoCatchBlock()2365 bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); }
2366
HasSideEffects()2367 bool HasSideEffects() const { return side_effects_.HasSideEffects(); }
DoesAnyWrite()2368 bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); }
2369
2370 // Does not apply to all instructions, but having this at the top level greatly
2371 // simplifies the null check elimination.
2372 // TODO: Consider merging can_be_null into ReferenceTypeInfo.
CanBeNull()2373 virtual bool CanBeNull() const {
2374 DCHECK_EQ(GetType(), DataType::Type::kReference) << "CanBeNull only applies to reference types";
2375 return true;
2376 }
2377
CanDoImplicitNullCheckOn(HInstruction * obj)2378 virtual bool CanDoImplicitNullCheckOn([[maybe_unused]] HInstruction* obj) const { return false; }
2379
2380 // If this instruction will do an implicit null check, return the `HNullCheck` associated
2381 // with it. Otherwise return null.
GetImplicitNullCheck()2382 HNullCheck* GetImplicitNullCheck() const {
2383 // Go over previous non-move instructions that are emitted at use site.
2384 HInstruction* prev_not_move = GetPreviousDisregardingMoves();
2385 while (prev_not_move != nullptr && prev_not_move->IsEmittedAtUseSite()) {
2386 if (prev_not_move->IsNullCheck()) {
2387 return prev_not_move->AsNullCheck();
2388 }
2389 prev_not_move = prev_not_move->GetPreviousDisregardingMoves();
2390 }
2391 return nullptr;
2392 }
2393
IsActualObject()2394 virtual bool IsActualObject() const {
2395 return GetType() == DataType::Type::kReference;
2396 }
2397
2398 // Sets the ReferenceTypeInfo. The RTI must be valid.
2399 void SetReferenceTypeInfo(ReferenceTypeInfo rti);
2400 // Same as above, but we only set it if it's valid. Otherwise, we don't change the current RTI.
2401 void SetReferenceTypeInfoIfValid(ReferenceTypeInfo rti);
2402
GetReferenceTypeInfo()2403 ReferenceTypeInfo GetReferenceTypeInfo() const {
2404 DCHECK_EQ(GetType(), DataType::Type::kReference);
2405 return ReferenceTypeInfo::CreateUnchecked(reference_type_handle_,
2406 GetPackedFlag<kFlagReferenceTypeIsExact>());
2407 }
2408
AddUseAt(HInstruction * user,size_t index)2409 void AddUseAt(HInstruction* user, size_t index) {
2410 DCHECK(user != nullptr);
2411 // Note: fixup_end remains valid across push_front().
2412 auto fixup_end = uses_.empty() ? uses_.begin() : ++uses_.begin();
2413 ArenaAllocator* allocator = user->GetBlock()->GetGraph()->GetAllocator();
2414 HUseListNode<HInstruction*>* new_node =
2415 new (allocator) HUseListNode<HInstruction*>(user, index);
2416 uses_.push_front(*new_node);
2417 FixUpUserRecordsAfterUseInsertion(fixup_end);
2418 }
2419
AddEnvUseAt(HEnvironment * user,size_t index)2420 void AddEnvUseAt(HEnvironment* user, size_t index) {
2421 DCHECK(user != nullptr);
2422 // Note: env_fixup_end remains valid across push_front().
2423 auto env_fixup_end = env_uses_.empty() ? env_uses_.begin() : ++env_uses_.begin();
2424 HUseListNode<HEnvironment*>* new_node =
2425 new (GetBlock()->GetGraph()->GetAllocator()) HUseListNode<HEnvironment*>(user, index);
2426 env_uses_.push_front(*new_node);
2427 FixUpUserRecordsAfterEnvUseInsertion(env_fixup_end);
2428 }
2429
RemoveAsUserOfInput(size_t input)2430 void RemoveAsUserOfInput(size_t input) {
2431 HUserRecord<HInstruction*> input_use = InputRecordAt(input);
2432 HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
2433 input_use.GetInstruction()->uses_.erase_after(before_use_node);
2434 input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
2435 }
2436
RemoveAsUserOfAllInputs()2437 void RemoveAsUserOfAllInputs() {
2438 for (const HUserRecord<HInstruction*>& input_use : GetInputRecords()) {
2439 HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
2440 input_use.GetInstruction()->uses_.erase_after(before_use_node);
2441 input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
2442 }
2443 }
2444
GetUses()2445 const HUseList<HInstruction*>& GetUses() const { return uses_; }
GetEnvUses()2446 const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; }
2447
HasUses()2448 bool HasUses() const { return !uses_.empty() || !env_uses_.empty(); }
HasEnvironmentUses()2449 bool HasEnvironmentUses() const { return !env_uses_.empty(); }
HasNonEnvironmentUses()2450 bool HasNonEnvironmentUses() const { return !uses_.empty(); }
HasOnlyOneNonEnvironmentUse()2451 bool HasOnlyOneNonEnvironmentUse() const {
2452 return !HasEnvironmentUses() && GetUses().HasExactlyOneElement();
2453 }
2454
IsRemovable()2455 bool IsRemovable() const {
2456 return
2457 !DoesAnyWrite() &&
2458 // TODO(solanes): Merge calls from IsSuspendCheck to IsControlFlow into one that doesn't
2459 // do virtual dispatching.
2460 !IsSuspendCheck() &&
2461 !IsNop() &&
2462 !IsParameterValue() &&
2463 // If we added an explicit barrier then we should keep it.
2464 !IsMemoryBarrier() &&
2465 !IsConstructorFence() &&
2466 !IsControlFlow() &&
2467 !CanThrow();
2468 }
2469
IsDeadAndRemovable()2470 bool IsDeadAndRemovable() const {
2471 return !HasUses() && IsRemovable();
2472 }
2473
IsPhiDeadAndRemovable()2474 bool IsPhiDeadAndRemovable() const {
2475 DCHECK(IsPhi());
2476 DCHECK(IsRemovable()) << " phis are always removable";
2477 return !HasUses();
2478 }
2479
2480 // Does this instruction dominate `other_instruction`?
2481 // Aborts if this instruction and `other_instruction` are different phis.
2482 bool Dominates(HInstruction* other_instruction) const;
2483
2484 // Same but with `strictly dominates` i.e. returns false if this instruction and
2485 // `other_instruction` are the same.
2486 bool StrictlyDominates(HInstruction* other_instruction) const;
2487
GetId()2488 int GetId() const { return id_; }
SetId(int id)2489 void SetId(int id) { id_ = id; }
2490
GetSsaIndex()2491 int GetSsaIndex() const { return ssa_index_; }
SetSsaIndex(int ssa_index)2492 void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; }
HasSsaIndex()2493 bool HasSsaIndex() const { return ssa_index_ != -1; }
2494
HasEnvironment()2495 bool HasEnvironment() const { return environment_ != nullptr; }
GetEnvironment()2496 HEnvironment* GetEnvironment() const { return environment_; }
GetAllEnvironments()2497 IterationRange<HEnvironmentIterator> GetAllEnvironments() const {
2498 return MakeIterationRange(HEnvironmentIterator(GetEnvironment()),
2499 HEnvironmentIterator(nullptr));
2500 }
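  // Illustrative sketch: walking the environment and its parent chain (parents exist when the
  // instruction comes from an inlined invoke); `instruction` is assumed given.
  //
  //   for (HEnvironment* environment : instruction->GetAllEnvironments()) {
  //     // ... e.g. record environment->GetMethod() and environment->GetDexPc() ...
  //   }
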
2501 // Set the `environment_` field. Raw because this method does not
2502 // update the uses lists.
SetRawEnvironment(HEnvironment * environment)2503 void SetRawEnvironment(HEnvironment* environment) {
2504 DCHECK(environment_ == nullptr);
2505 DCHECK_EQ(environment->GetHolder(), this);
2506 environment_ = environment;
2507 }
2508
InsertRawEnvironment(HEnvironment * environment)2509 void InsertRawEnvironment(HEnvironment* environment) {
2510 DCHECK(environment_ != nullptr);
2511 DCHECK_EQ(environment->GetHolder(), this);
2512 DCHECK(environment->GetParent() == nullptr);
2513 environment->parent_ = environment_;
2514 environment_ = environment;
2515 }
2516
2517 void RemoveEnvironment();
2518
2519 // Set the environment of this instruction, copying it from `environment`. While
2520 // copying, the uses lists are being updated.
CopyEnvironmentFrom(HEnvironment * environment)2521 void CopyEnvironmentFrom(HEnvironment* environment) {
2522 DCHECK(environment_ == nullptr);
2523 ArenaAllocator* allocator = GetBlock()->GetGraph()->GetAllocator();
2524 environment_ = new (allocator) HEnvironment(allocator, *environment, this);
2525 environment_->CopyFrom(environment);
2526 if (environment->GetParent() != nullptr) {
2527 environment_->SetAndCopyParentChain(allocator, environment->GetParent());
2528 }
2529 }
2530
CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment * environment,HBasicBlock * block)2531 void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment,
2532 HBasicBlock* block) {
2533 DCHECK(environment_ == nullptr);
2534 ArenaAllocator* allocator = GetBlock()->GetGraph()->GetAllocator();
2535 environment_ = new (allocator) HEnvironment(allocator, *environment, this);
2536 environment_->CopyFromWithLoopPhiAdjustment(environment, block);
2537 if (environment->GetParent() != nullptr) {
2538 environment_->SetAndCopyParentChain(allocator, environment->GetParent());
2539 }
2540 }
2541
2542 // Returns the number of entries in the environment. Typically, that is the
2543 // number of dex registers in a method. It could be more in case of inlining.
2544 size_t EnvironmentSize() const;
2545
GetLocations()2546 LocationSummary* GetLocations() const { return locations_; }
SetLocations(LocationSummary * locations)2547 void SetLocations(LocationSummary* locations) { locations_ = locations; }
2548
2549 void ReplaceWith(HInstruction* instruction);
2550 void ReplaceUsesDominatedBy(HInstruction* dominator,
2551 HInstruction* replacement,
2552 bool strictly_dominated = true);
2553 void ReplaceEnvUsesDominatedBy(HInstruction* dominator, HInstruction* replacement);
2554 void ReplaceInput(HInstruction* replacement, size_t index);
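  // Illustrative sketch: a typical simplification replaces all uses of a redundant, side-effect
  // free instruction with one of its inputs and then removes it from its block (`redundant` is
  // an assumed name).
  //
  //   redundant->ReplaceWith(redundant->InputAt(0));
  //   redundant->GetBlock()->RemoveInstruction(redundant);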
2555
2556 // This is almost the same as doing `ReplaceWith()`. But in this helper, the
2557 // uses of this instruction by `other` are *not* updated.
ReplaceWithExceptInReplacementAtIndex(HInstruction * other,size_t use_index)2558 void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) {
2559 ReplaceWith(other);
2560 other->ReplaceInput(this, use_index);
2561 }
2562
2563 // Move `this` instruction before `cursor`
2564 void MoveBefore(HInstruction* cursor, bool do_checks = true);
2565
2566 // Move `this` before its first user and out of any loops. If there is no
2567 // out-of-loop user that dominates all other users, move the instruction
2568 // to the end of the out-of-loop common dominator of the user's blocks.
2569 //
2570 // This can be used only on non-throwing instructions with no side effects that
2571 // have at least one use but no environment uses.
2572 void MoveBeforeFirstUserAndOutOfLoops();
2573
2574 #define INSTRUCTION_TYPE_CHECK(type, super) \
2575 bool Is##type() const;
2576
2577 FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
2578 #undef INSTRUCTION_TYPE_CHECK
2579
2580 #define INSTRUCTION_TYPE_CAST(type, super) \
2581 const H##type* As##type() const; \
2582 H##type* As##type(); \
2583 const H##type* As##type##OrNull() const; \
2584 H##type* As##type##OrNull();
2585
FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CAST)2586 FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CAST)
2587 #undef INSTRUCTION_TYPE_CAST
2588
2589 // Return a clone of the instruction if it is clonable (shallow copy by default, custom copy
2590 // if a custom copy-constructor is provided for a particular type). If IsClonable() is false for
2591 // the instruction then the behaviour of this function is undefined.
2592 //
2593 // Note: It is semantically valid to create a clone of the instruction only until
2594 // prepare_for_register_allocator phase as lifetime, intervals and codegen info are not
2595 // copied.
2596 //
2597 // Note: HEnvironment and some other fields are not copied and are set to default values, see
2598 // 'explicit HInstruction(const HInstruction& other)' for details.
2599 virtual HInstruction* Clone([[maybe_unused]] ArenaAllocator* arena) const {
2600 LOG(FATAL) << "Cloning is not implemented for the instruction " <<
2601 DebugName() << " " << GetId();
2602 UNREACHABLE();
2603 }
2604
IsFieldAccess()2605 virtual bool IsFieldAccess() const {
2606 return false;
2607 }
2608
GetFieldInfo()2609 virtual const FieldInfo& GetFieldInfo() const {
2610 CHECK(IsFieldAccess()) << "Only callable on field accessors not " << DebugName() << " "
2611 << *this;
2612 LOG(FATAL) << "Must be overridden by field accessors. Not implemented by " << *this;
2613 UNREACHABLE();
2614 }
2615
2616 // Returns whether the instruction can be cloned (copied).
IsClonable()2617 virtual bool IsClonable() const { return false; }
2618
2619 // Returns whether the instruction can be moved within the graph.
2620 // TODO: this method is used by LICM and GVN with possibly different
2621 // meanings? split and rename?
CanBeMoved()2622 virtual bool CanBeMoved() const { return false; }
2623
2624 // Returns whether any data encoded in the two instructions is equal.
2625 // This method does not look at the inputs. Both instructions must be
2626 // of the same type, otherwise the method has undefined behavior.
InstructionDataEquals(const HInstruction * other)2627 virtual bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const {
2628 return false;
2629 }
2630
2631 // Returns whether two instructions are equal, that is:
2632 // 1) They have the same type and contain the same data (InstructionDataEquals).
2633 // 2) Their inputs are identical.
2634 bool Equals(const HInstruction* other) const;
2635
GetKind()2636 InstructionKind GetKind() const { return GetPackedField<InstructionKindField>(); }
2637
ComputeHashCode()2638 virtual size_t ComputeHashCode() const {
2639 size_t result = GetKind();
2640 for (const HInstruction* input : GetInputs()) {
2641 result = (result * 31) + input->GetId();
2642 }
2643 return result;
2644 }
2645
GetSideEffects()2646 SideEffects GetSideEffects() const { return side_effects_; }
SetSideEffects(SideEffects other)2647 void SetSideEffects(SideEffects other) { side_effects_ = other; }
AddSideEffects(SideEffects other)2648 void AddSideEffects(SideEffects other) { side_effects_.Add(other); }
2649
GetLifetimePosition()2650 size_t GetLifetimePosition() const { return lifetime_position_; }
SetLifetimePosition(size_t position)2651 void SetLifetimePosition(size_t position) { lifetime_position_ = position; }
GetLiveInterval()2652 LiveInterval* GetLiveInterval() const { return live_interval_; }
SetLiveInterval(LiveInterval * interval)2653 void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; }
HasLiveInterval()2654 bool HasLiveInterval() const { return live_interval_ != nullptr; }
2655
IsSuspendCheckEntry()2656 bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); }
2657
2658 // Returns whether the code generation of the instruction will require to have access
2659 // to the current method. Such instructions are:
2660 // (1): Instructions that require an environment, as calling the runtime requires
2661 // to walk the stack and have the current method stored at a specific stack address.
2662 // (2): HCurrentMethod, potentially used by HInvokeStaticOrDirect, HLoadString, or HLoadClass
2663 // to access the dex cache.
NeedsCurrentMethod()2664 bool NeedsCurrentMethod() const {
2665 return NeedsEnvironment() || IsCurrentMethod();
2666 }
2667
2668 // Does this instruction have any use in an environment before
2669 // control flow hits 'other'?
2670 bool HasAnyEnvironmentUseBefore(HInstruction* other);
2671
2672 // Remove all references to environment uses of this instruction.
2673 // The caller must ensure that this is safe to do.
2674 void RemoveEnvironmentUsers();
2675
IsEmittedAtUseSite()2676 bool IsEmittedAtUseSite() const { return GetPackedFlag<kFlagEmittedAtUseSite>(); }
MarkEmittedAtUseSite()2677 void MarkEmittedAtUseSite() { SetPackedFlag<kFlagEmittedAtUseSite>(true); }
2678
2679 protected:
2680 // If set, the machine code for this instruction is assumed to be generated by
2681 // its users. Used by liveness analysis to compute use positions accordingly.
2682 static constexpr size_t kFlagEmittedAtUseSite = 0u;
2683 static constexpr size_t kFlagReferenceTypeIsExact = kFlagEmittedAtUseSite + 1;
2684 static constexpr size_t kFieldInstructionKind = kFlagReferenceTypeIsExact + 1;
2685 static constexpr size_t kFieldInstructionKindSize =
2686 MinimumBitsToStore(static_cast<size_t>(InstructionKind::kLastInstructionKind - 1));
2687 static constexpr size_t kFieldType =
2688 kFieldInstructionKind + kFieldInstructionKindSize;
2689 static constexpr size_t kFieldTypeSize =
2690 MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
2691 static constexpr size_t kNumberOfGenericPackedBits = kFieldType + kFieldTypeSize;
2692 static constexpr size_t kMaxNumberOfPackedBits = sizeof(uint32_t) * kBitsPerByte;
2693
2694 static_assert(kNumberOfGenericPackedBits <= kMaxNumberOfPackedBits,
2695 "Too many generic packed fields");
2696
2697 using TypeField = BitField<DataType::Type, kFieldType, kFieldTypeSize>;
2698
InputRecordAt(size_t i)2699 const HUserRecord<HInstruction*> InputRecordAt(size_t i) const {
2700 return GetInputRecords()[i];
2701 }
2702
SetRawInputRecordAt(size_t index,const HUserRecord<HInstruction * > & input)2703 void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) {
2704 ArrayRef<HUserRecord<HInstruction*>> input_records = GetInputRecords();
2705 input_records[index] = input;
2706 }
2707
GetPackedFields()2708 uint32_t GetPackedFields() const {
2709 return packed_fields_;
2710 }
2711
2712 template <size_t flag>
GetPackedFlag()2713 bool GetPackedFlag() const {
2714 return (packed_fields_ & (1u << flag)) != 0u;
2715 }
2716
2717 template <size_t flag>
2718 void SetPackedFlag(bool value = true) {
2719 packed_fields_ = (packed_fields_ & ~(1u << flag)) | ((value ? 1u : 0u) << flag);
2720 }
2721
2722 template <typename BitFieldType>
GetPackedField()2723 typename BitFieldType::value_type GetPackedField() const {
2724 return BitFieldType::Decode(packed_fields_);
2725 }
2726
2727 template <typename BitFieldType>
SetPackedField(typename BitFieldType::value_type value)2728 void SetPackedField(typename BitFieldType::value_type value) {
2729 DCHECK(IsUint<BitFieldType::size>(static_cast<uintptr_t>(value)));
2730 packed_fields_ = BitFieldType::Update(value, packed_fields_);
2731 }
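// A rough usage sketch of the packed-fields helpers above (illustrative only, not part of this
// header): a subclass lays out its own flags and bit fields starting at
// kNumberOfGenericPackedBits, exactly like the concrete instructions further below (e.g. HPhi,
// HTryBoundary). The names kFlagFoo, kFieldBar and FooKind are hypothetical.
//
//   static constexpr size_t kFlagFoo = HInstruction::kNumberOfGenericPackedBits;
//   static constexpr size_t kFieldBar = kFlagFoo + 1;
//   static constexpr size_t kFieldBarSize =
//       MinimumBitsToStore(static_cast<size_t>(FooKind::kLast));
//   static constexpr size_t kNumberOfFooPackedBits = kFieldBar + kFieldBarSize;
//   static_assert(kNumberOfFooPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
//   using BarField = BitField<FooKind, kFieldBar, kFieldBarSize>;
//
//   bool IsFoo() const { return GetPackedFlag<kFlagFoo>(); }
//   void SetFoo(bool value) { SetPackedFlag<kFlagFoo>(value); }
//   FooKind GetBar() const { return GetPackedField<BarField>(); }
//   void SetBar(FooKind kind) { SetPackedField<BarField>(kind); }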
2732
2733 // Copy construction for the instruction (used for Clone function).
2734 //
2735 // Fields (e.g. lifetime, intervals and codegen info) associated with phases starting from
2736 // prepare_for_register_allocator are not copied (set to default values).
2737 //
2738 // Copy constructors must be provided for every HInstruction type; the default copy
2739 // constructor is fine for most of them. However, some instructions need a custom copy
2740 // constructor (when an instruction has non-trivially copyable fields that require special
2741 // handling when copied).
2742 explicit HInstruction(const HInstruction& other)
2743 : previous_(nullptr),
2744 next_(nullptr),
2745 block_(nullptr),
2746 dex_pc_(other.dex_pc_),
2747 id_(-1),
2748 ssa_index_(-1),
2749 packed_fields_(other.packed_fields_),
2750 environment_(nullptr),
2751 locations_(nullptr),
2752 live_interval_(nullptr),
2753 lifetime_position_(kNoLifetime),
2754 side_effects_(other.side_effects_),
2755 reference_type_handle_(other.reference_type_handle_) {
2756 }
2757
2758 private:
2759 using InstructionKindField =
2760 BitField<InstructionKind, kFieldInstructionKind, kFieldInstructionKindSize>;
2761
2762 void FixUpUserRecordsAfterUseInsertion(HUseList<HInstruction*>::iterator fixup_end) {
2763 auto before_use_node = uses_.before_begin();
2764 for (auto use_node = uses_.begin(); use_node != fixup_end; ++use_node) {
2765 HInstruction* user = use_node->GetUser();
2766 size_t input_index = use_node->GetIndex();
2767 user->SetRawInputRecordAt(input_index, HUserRecord<HInstruction*>(this, before_use_node));
2768 before_use_node = use_node;
2769 }
2770 }
2771
2772 void FixUpUserRecordsAfterUseRemoval(HUseList<HInstruction*>::iterator before_use_node) {
2773 auto next = ++HUseList<HInstruction*>::iterator(before_use_node);
2774 if (next != uses_.end()) {
2775 HInstruction* next_user = next->GetUser();
2776 size_t next_index = next->GetIndex();
2777 DCHECK(next_user->InputRecordAt(next_index).GetInstruction() == this);
2778 next_user->SetRawInputRecordAt(next_index, HUserRecord<HInstruction*>(this, before_use_node));
2779 }
2780 }
2781
2782 void FixUpUserRecordsAfterEnvUseInsertion(HUseList<HEnvironment*>::iterator env_fixup_end) {
2783 auto before_env_use_node = env_uses_.before_begin();
2784 for (auto env_use_node = env_uses_.begin(); env_use_node != env_fixup_end; ++env_use_node) {
2785 HEnvironment* user = env_use_node->GetUser();
2786 size_t input_index = env_use_node->GetIndex();
2787 user->vregs_[input_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
2788 before_env_use_node = env_use_node;
2789 }
2790 }
2791
2792 void FixUpUserRecordsAfterEnvUseRemoval(HUseList<HEnvironment*>::iterator before_env_use_node) {
2793 auto next = ++HUseList<HEnvironment*>::iterator(before_env_use_node);
2794 if (next != env_uses_.end()) {
2795 HEnvironment* next_user = next->GetUser();
2796 size_t next_index = next->GetIndex();
2797 DCHECK(next_user->vregs_[next_index].GetInstruction() == this);
2798 next_user->vregs_[next_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
2799 }
2800 }
2801
2802 HInstruction* previous_;
2803 HInstruction* next_;
2804 HBasicBlock* block_;
2805 const uint32_t dex_pc_;
2806
2807 // An instruction gets an id when it is added to the graph.
2808 // It reflects creation order. A negative id means the instruction
2809 // has not been added to the graph.
2810 int id_;
2811
2812 // When doing liveness analysis, instructions that have uses get an SSA index.
2813 int ssa_index_;
2814
2815 // Packed fields.
2816 uint32_t packed_fields_;
2817
2818 // List of instructions that have this instruction as input.
2819 HUseList<HInstruction*> uses_;
2820
2821 // List of environments that contain this instruction.
2822 HUseList<HEnvironment*> env_uses_;
2823
2824 // The environment associated with this instruction. Not null if the instruction
2825 // might jump out of the method.
2826 HEnvironment* environment_;
2827
2828 // Set by the code generator.
2829 LocationSummary* locations_;
2830
2831 // Set by the liveness analysis.
2832 LiveInterval* live_interval_;
2833
2834 // Set by the liveness analysis, this is the position in a linear
2835 // order of blocks where this instruction's live interval starts.
2836 size_t lifetime_position_;
2837
2838 SideEffects side_effects_;
2839
2840 // The reference handle part of the reference type info.
2841 // The IsExact() flag is stored in packed fields.
2842 // TODO: for primitive types this should be marked as invalid.
2843 ReferenceTypeInfo::TypeHandle reference_type_handle_;
2844
2845 friend class GraphChecker;
2846 friend class HBasicBlock;
2847 friend class HEnvironment;
2848 friend class HGraph;
2849 friend class HInstructionList;
2850 };
2851
2852 std::ostream& operator<<(std::ostream& os, HInstruction::InstructionKind rhs);
2853 std::ostream& operator<<(std::ostream& os, const HInstruction::NoArgsDump rhs);
2854 std::ostream& operator<<(std::ostream& os, const HInstruction::ArgsDump rhs);
2855 std::ostream& operator<<(std::ostream& os, const HUseList<HInstruction*>& lst);
2856 std::ostream& operator<<(std::ostream& os, const HUseList<HEnvironment*>& lst);
2857
2858 // Forward declarations for friends
2859 template <typename InnerIter> struct HSTLInstructionIterator;
2860
2861 // Iterates over the instructions, while preserving the next instruction
2862 // in case the current instruction gets removed from the list by the user
2863 // of this iterator.
2864 class HInstructionIterator : public ValueObject {
2865 public:
2866 explicit HInstructionIterator(const HInstructionList& instructions)
2867 : instruction_(instructions.first_instruction_) {
2868 next_ = Done() ? nullptr : instruction_->GetNext();
2869 }
2870
2871 bool Done() const { return instruction_ == nullptr; }
2872 HInstruction* Current() const { return instruction_; }
2873 void Advance() {
2874 instruction_ = next_;
2875 next_ = Done() ? nullptr : instruction_->GetNext();
2876 }
2877
2878 private:
2879 HInstructionIterator() : instruction_(nullptr), next_(nullptr) {}
2880
2881 HInstruction* instruction_;
2882 HInstruction* next_;
2883
2884 friend struct HSTLInstructionIterator<HInstructionIterator>;
2885 };
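// A minimal usage sketch (illustrative only): because the iterator caches `next_`, the current
// instruction may be removed from its block while iterating. This assumes an HBasicBlock* block
// with the usual GetInstructions() and RemoveInstruction() helpers, and uses HasUses() as a
// simplistic dead-code check.
//
//   for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
//     HInstruction* current = it.Current();
//     if (!current->HasUses()) {
//       block->RemoveInstruction(current);  // safe: the iterator already recorded the next node
//     }
//   }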
2886
2887 // Iterates over the instructions without saving the next instruction,
2888 // therefore handling changes in the graph potentially made by the user
2889 // of this iterator.
2890 class HInstructionIteratorHandleChanges : public ValueObject {
2891 public:
2892 explicit HInstructionIteratorHandleChanges(const HInstructionList& instructions)
2893 : instruction_(instructions.first_instruction_) {
2894 }
2895
2896 bool Done() const { return instruction_ == nullptr; }
2897 HInstruction* Current() const { return instruction_; }
2898 void Advance() {
2899 instruction_ = instruction_->GetNext();
2900 }
2901
2902 private:
2903 HInstructionIteratorHandleChanges() : instruction_(nullptr) {}
2904
2905 HInstruction* instruction_;
2906
2907 friend struct HSTLInstructionIterator<HInstructionIteratorHandleChanges>;
2908 };
2909
2910
2911 class HBackwardInstructionIterator : public ValueObject {
2912 public:
2913 explicit HBackwardInstructionIterator(const HInstructionList& instructions)
2914 : instruction_(instructions.last_instruction_) {
2915 next_ = Done() ? nullptr : instruction_->GetPrevious();
2916 }
2917
2918 explicit HBackwardInstructionIterator(HInstruction* instruction) : instruction_(instruction) {
2919 next_ = Done() ? nullptr : instruction_->GetPrevious();
2920 }
2921
2922 bool Done() const { return instruction_ == nullptr; }
2923 HInstruction* Current() const { return instruction_; }
2924 void Advance() {
2925 instruction_ = next_;
2926 next_ = Done() ? nullptr : instruction_->GetPrevious();
2927 }
2928
2929 private:
2930 HBackwardInstructionIterator() : instruction_(nullptr), next_(nullptr) {}
2931
2932 HInstruction* instruction_;
2933 HInstruction* next_;
2934
2935 friend struct HSTLInstructionIterator<HBackwardInstructionIterator>;
2936 };
2937
2938 template <typename InnerIter>
2939 struct HSTLInstructionIterator : public ValueObject {
2940 public:
2941 using iterator_category = std::forward_iterator_tag;
2942 using value_type = HInstruction*;
2943 using difference_type = ptrdiff_t;
2944 using pointer = void;
2945 using reference = void;
2946
2947 static_assert(std::is_same_v<InnerIter, HBackwardInstructionIterator> ||
2948 std::is_same_v<InnerIter, HInstructionIterator> ||
2949 std::is_same_v<InnerIter, HInstructionIteratorHandleChanges>,
2950 "Unknown wrapped iterator!");
2951
2952 explicit HSTLInstructionIterator(InnerIter inner) : inner_(inner) {}
2953 HInstruction* operator*() const {
2954 DCHECK(inner_.Current() != nullptr);
2955 return inner_.Current();
2956 }
2957
2958 HSTLInstructionIterator<InnerIter>& operator++() {
2959 DCHECK(*this != HSTLInstructionIterator<InnerIter>::EndIter());
2960 inner_.Advance();
2961 return *this;
2962 }
2963
2964 HSTLInstructionIterator<InnerIter> operator++(int) {
2965 HSTLInstructionIterator<InnerIter> prev(*this);
2966 ++(*this);
2967 return prev;
2968 }
2969
2970 bool operator==(const HSTLInstructionIterator<InnerIter>& other) const {
2971 return inner_.Current() == other.inner_.Current();
2972 }
2973
2974 bool operator!=(const HSTLInstructionIterator<InnerIter>& other) const {
2975 return !(*this == other);
2976 }
2977
2978 static HSTLInstructionIterator<InnerIter> EndIter() {
2979 return HSTLInstructionIterator<InnerIter>(InnerIter());
2980 }
2981
2982 private:
2983 InnerIter inner_;
2984 };
2985
2986 template <typename InnerIter>
2987 IterationRange<HSTLInstructionIterator<InnerIter>> MakeSTLInstructionIteratorRange(InnerIter iter) {
2988 return MakeIterationRange(HSTLInstructionIterator<InnerIter>(iter),
2989 HSTLInstructionIterator<InnerIter>::EndIter());
2990 }
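// A minimal sketch of using the STL adapter in a range-based for loop (illustrative only;
// assumes an HBasicBlock* block with the usual GetInstructions() accessor):
//
//   for (HInstruction* instruction : MakeSTLInstructionIteratorRange(
//            HInstructionIterator(block->GetInstructions()))) {
//     ProcessSomehow(instruction);  // hypothetical per-instruction work
//   }
//
// HBackwardInstructionIterator and HInstructionIteratorHandleChanges can be wrapped the same way.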
2991
2992 class HVariableInputSizeInstruction : public HInstruction {
2993 public:
2994 using HInstruction::GetInputRecords; // Keep the const version visible.
2995 ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() override {
2996 return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
2997 }
2998
2999 void AddInput(HInstruction* input);
3000 void InsertInputAt(size_t index, HInstruction* input);
3001 void RemoveInputAt(size_t index);
3002
3003 // Removes all the inputs.
3004 // Also removes this instruction from each input's use list
3005 // (for non-environment uses only).
3006 void RemoveAllInputs();
3007
3008 protected:
3009 HVariableInputSizeInstruction(InstructionKind inst_kind,
3010 SideEffects side_effects,
3011 uint32_t dex_pc,
3012 ArenaAllocator* allocator,
3013 size_t number_of_inputs,
3014 ArenaAllocKind kind)
3015 : HInstruction(inst_kind, side_effects, dex_pc),
3016 inputs_(number_of_inputs, allocator->Adapter(kind)) {}
3017 HVariableInputSizeInstruction(InstructionKind inst_kind,
3018 DataType::Type type,
3019 SideEffects side_effects,
3020 uint32_t dex_pc,
3021 ArenaAllocator* allocator,
3022 size_t number_of_inputs,
3023 ArenaAllocKind kind)
3024 : HInstruction(inst_kind, type, side_effects, dex_pc),
3025 inputs_(number_of_inputs, allocator->Adapter(kind)) {}
3026
3027 DEFAULT_COPY_CONSTRUCTOR(VariableInputSizeInstruction);
3028
3029 ArenaVector<HUserRecord<HInstruction*>> inputs_;
3030 };
3031
3032 template<size_t N>
3033 class HExpression : public HInstruction {
3034 public:
3035 HExpression<N>(InstructionKind kind, SideEffects side_effects, uint32_t dex_pc)
3036 : HInstruction(kind, side_effects, dex_pc), inputs_() {}
3037 HExpression<N>(InstructionKind kind,
3038 DataType::Type type,
3039 SideEffects side_effects,
3040 uint32_t dex_pc)
3041 : HInstruction(kind, type, side_effects, dex_pc), inputs_() {}
3042 virtual ~HExpression() {}
3043
3044 using HInstruction::GetInputRecords; // Keep the const version visible.
3045 ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
3046 return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
3047 }
3048
3049 protected:
3050 DEFAULT_COPY_CONSTRUCTOR(Expression<N>);
3051
3052 private:
3053 std::array<HUserRecord<HInstruction*>, N> inputs_;
3054
3055 friend class SsaBuilder;
3056 };
3057
3058 // HExpression specialization for N=0.
3059 template<>
3060 class HExpression<0> : public HInstruction {
3061 public:
3062 using HInstruction::HInstruction;
3063
3064 virtual ~HExpression() {}
3065
3066 using HInstruction::GetInputRecords; // Keep the const version visible.
3067 ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
3068 return ArrayRef<HUserRecord<HInstruction*>>();
3069 }
3070
3071 protected:
3072 DEFAULT_COPY_CONSTRUCTOR(Expression<0>);
3073
3074 private:
3075 friend class SsaBuilder;
3076 };
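// A rough sketch of how a fixed-arity instruction builds on HExpression<N> (illustrative only;
// HExample and kExample are hypothetical, but real instructions below such as HReturn and HIf
// follow exactly this shape):
//
//   class HExample final : public HExpression<1> {
//    public:
//     HExample(HInstruction* value, uint32_t dex_pc = kNoDexPc)
//         : HExpression(kExample, DataType::Type::kInt32, SideEffects::None(), dex_pc) {
//       SetRawInputAt(0, value);  // wire the single fixed-size input slot
//     }
//
//     DECLARE_INSTRUCTION(Example);
//
//    protected:
//     DEFAULT_COPY_CONSTRUCTOR(Example);
//   };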
3077
3078 class HMethodEntryHook : public HExpression<0> {
3079 public:
3080 explicit HMethodEntryHook(uint32_t dex_pc)
3081 : HExpression(kMethodEntryHook, SideEffects::All(), dex_pc) {}
3082
3083 bool NeedsEnvironment() const override {
3084 return true;
3085 }
3086
3087 bool CanThrow() const override { return true; }
3088
3089 DECLARE_INSTRUCTION(MethodEntryHook);
3090
3091 protected:
3092 DEFAULT_COPY_CONSTRUCTOR(MethodEntryHook);
3093 };
3094
3095 class HMethodExitHook : public HExpression<1> {
3096 public:
3097 HMethodExitHook(HInstruction* value, uint32_t dex_pc)
3098 : HExpression(kMethodExitHook, SideEffects::All(), dex_pc) {
3099 SetRawInputAt(0, value);
3100 }
3101
3102 bool NeedsEnvironment() const override {
3103 return true;
3104 }
3105
3106 bool CanThrow() const override { return true; }
3107
3108 DECLARE_INSTRUCTION(MethodExitHook);
3109
3110 protected:
3111 DEFAULT_COPY_CONSTRUCTOR(MethodExitHook);
3112 };
3113
3114 // Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow
3115 // instruction that branches to the exit block.
3116 class HReturnVoid final : public HExpression<0> {
3117 public:
3118 explicit HReturnVoid(uint32_t dex_pc = kNoDexPc)
3119 : HExpression(kReturnVoid, SideEffects::None(), dex_pc) {
3120 }
3121
3122 bool IsControlFlow() const override { return true; }
3123
3124 DECLARE_INSTRUCTION(ReturnVoid);
3125
3126 protected:
3127 DEFAULT_COPY_CONSTRUCTOR(ReturnVoid);
3128 };
3129
3130 // Represents dex's RETURN opcodes. A HReturn is a control flow
3131 // instruction that branches to the exit block.
3132 class HReturn final : public HExpression<1> {
3133 public:
3134 explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc)
3135 : HExpression(kReturn, SideEffects::None(), dex_pc) {
3136 SetRawInputAt(0, value);
3137 }
3138
3139 bool IsControlFlow() const override { return true; }
3140
3141 DECLARE_INSTRUCTION(Return);
3142
3143 protected:
3144 DEFAULT_COPY_CONSTRUCTOR(Return);
3145 };
3146
3147 class HPhi final : public HVariableInputSizeInstruction {
3148 public:
3149 HPhi(ArenaAllocator* allocator,
3150 uint32_t reg_number,
3151 size_t number_of_inputs,
3152 DataType::Type type,
3153 uint32_t dex_pc = kNoDexPc)
3154 : HVariableInputSizeInstruction(
3155 kPhi,
3156 ToPhiType(type),
3157 SideEffects::None(),
3158 dex_pc,
3159 allocator,
3160 number_of_inputs,
3161 kArenaAllocPhiInputs),
3162 reg_number_(reg_number) {
3163 DCHECK_NE(GetType(), DataType::Type::kVoid);
3164 // Phis are constructed live and marked dead if conflicting or unused.
3165 // Individual steps of SsaBuilder should assume that if a phi has been
3166 // marked dead, it can be ignored and will be removed by SsaPhiElimination.
3167 SetPackedFlag<kFlagIsLive>(true);
3168 SetPackedFlag<kFlagCanBeNull>(true);
3169 }
3170
3171 bool IsClonable() const override { return true; }
3172
3173 // Returns a type equivalent to the given `type`, but that a `HPhi` can hold.
3174 static DataType::Type ToPhiType(DataType::Type type) {
3175 return DataType::Kind(type);
3176 }
3177
3178 bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); }
3179
3180 void SetType(DataType::Type new_type) {
3181 // Make sure that only valid type changes occur. The following are allowed:
3182 // (1) int -> float/ref (primitive type propagation),
3183 // (2) long -> double (primitive type propagation).
3184 DCHECK(GetType() == new_type ||
3185 (GetType() == DataType::Type::kInt32 && new_type == DataType::Type::kFloat32) ||
3186 (GetType() == DataType::Type::kInt32 && new_type == DataType::Type::kReference) ||
3187 (GetType() == DataType::Type::kInt64 && new_type == DataType::Type::kFloat64));
3188 SetPackedField<TypeField>(new_type);
3189 }
3190
3191 bool CanBeNull() const override { return GetPackedFlag<kFlagCanBeNull>(); }
3192 void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }
3193
3194 uint32_t GetRegNumber() const { return reg_number_; }
3195
3196 void SetDead() { SetPackedFlag<kFlagIsLive>(false); }
3197 void SetLive() { SetPackedFlag<kFlagIsLive>(true); }
3198 bool IsDead() const { return !IsLive(); }
3199 bool IsLive() const { return GetPackedFlag<kFlagIsLive>(); }
3200
3201 bool IsVRegEquivalentOf(const HInstruction* other) const {
3202 return other != nullptr
3203 && other->IsPhi()
3204 && other->GetBlock() == GetBlock()
3205 && other->AsPhi()->GetRegNumber() == GetRegNumber();
3206 }
3207
3208 bool HasEquivalentPhi() const {
3209 if (GetPrevious() != nullptr && GetPrevious()->AsPhi()->GetRegNumber() == GetRegNumber()) {
3210 return true;
3211 }
3212 if (GetNext() != nullptr && GetNext()->AsPhi()->GetRegNumber() == GetRegNumber()) {
3213 return true;
3214 }
3215 return false;
3216 }
3217
3218 // Returns the next equivalent phi (starting from the current one) or null if there is none.
3219 // An equivalent phi is a phi having the same dex register and type.
3220 // It assumes that phis with the same dex register are adjacent.
3221 HPhi* GetNextEquivalentPhiWithSameType() {
3222 HInstruction* next = GetNext();
3223 while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) {
3224 if (next->GetType() == GetType()) {
3225 return next->AsPhi();
3226 }
3227 next = next->GetNext();
3228 }
3229 return nullptr;
3230 }
3231
3232 DECLARE_INSTRUCTION(Phi);
3233
3234 protected:
3235 DEFAULT_COPY_CONSTRUCTOR(Phi);
3236
3237 private:
3238 static constexpr size_t kFlagIsLive = HInstruction::kNumberOfGenericPackedBits;
3239 static constexpr size_t kFlagCanBeNull = kFlagIsLive + 1;
3240 static constexpr size_t kNumberOfPhiPackedBits = kFlagCanBeNull + 1;
3241 static_assert(kNumberOfPhiPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
3242
3243 const uint32_t reg_number_;
3244 };
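// A minimal sketch of constructing a phi for a join block (illustrative only; `allocator`,
// `join_block`, `v0` and `v1` are hypothetical locals, and AddPhi() is assumed to be the usual
// HBasicBlock helper for attaching phis):
//
//   HPhi* phi = new (allocator) HPhi(allocator,
//                                    /* reg_number= */ 3,        // dex register being merged
//                                    /* number_of_inputs= */ 0,
//                                    DataType::Type::kInt32);
//   phi->AddInput(v0);  // value flowing in from predecessor 0
//   phi->AddInput(v1);  // value flowing in from predecessor 1
//   join_block->AddPhi(phi);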
3245
3246 // The exit instruction is the only instruction of the exit block.
3247 // Instructions aborting the method (HThrow and HReturn) must branch to the
3248 // exit block.
3249 class HExit final : public HExpression<0> {
3250 public:
3251 explicit HExit(uint32_t dex_pc = kNoDexPc)
3252 : HExpression(kExit, SideEffects::None(), dex_pc) {
3253 }
3254
3255 bool IsControlFlow() const override { return true; }
3256
3257 DECLARE_INSTRUCTION(Exit);
3258
3259 protected:
3260 DEFAULT_COPY_CONSTRUCTOR(Exit);
3261 };
3262
3263 // Jumps from one block to another.
3264 class HGoto final : public HExpression<0> {
3265 public:
3266 explicit HGoto(uint32_t dex_pc = kNoDexPc)
3267 : HExpression(kGoto, SideEffects::None(), dex_pc) {
3268 }
3269
3270 bool IsClonable() const override { return true; }
3271 bool IsControlFlow() const override { return true; }
3272
3273 HBasicBlock* GetSuccessor() const {
3274 return GetBlock()->GetSingleSuccessor();
3275 }
3276
3277 DECLARE_INSTRUCTION(Goto);
3278
3279 protected:
3280 DEFAULT_COPY_CONSTRUCTOR(Goto);
3281 };
3282
3283 class HConstant : public HExpression<0> {
3284 public:
3285 explicit HConstant(InstructionKind kind, DataType::Type type, uint32_t dex_pc = kNoDexPc)
3286 : HExpression(kind, type, SideEffects::None(), dex_pc) {
3287 }
3288
3289 bool CanBeMoved() const override { return true; }
3290
3291 // Is this constant -1 in the arithmetic sense?
3292 virtual bool IsMinusOne() const { return false; }
3293 // Is this constant 0 in the arithmetic sense?
3294 virtual bool IsArithmeticZero() const { return false; }
3295 // Is this constant a 0-bit pattern?
3296 virtual bool IsZeroBitPattern() const { return false; }
3297 // Is this constant 1 in the arithmetic sense?
3298 virtual bool IsOne() const { return false; }
3299
3300 virtual uint64_t GetValueAsUint64() const = 0;
3301
3302 DECLARE_ABSTRACT_INSTRUCTION(Constant);
3303
3304 protected:
3305 DEFAULT_COPY_CONSTRUCTOR(Constant);
3306 };
3307
3308 class HNullConstant final : public HConstant {
3309 public:
3310 bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
3311 return true;
3312 }
3313
3314 uint64_t GetValueAsUint64() const override { return 0; }
3315
3316 size_t ComputeHashCode() const override { return 0; }
3317
3318 // The null constant representation is a 0-bit pattern.
3319 bool IsZeroBitPattern() const override { return true; }
3320
3321 DECLARE_INSTRUCTION(NullConstant);
3322
3323 protected:
3324 DEFAULT_COPY_CONSTRUCTOR(NullConstant);
3325
3326 private:
3327 explicit HNullConstant(uint32_t dex_pc = kNoDexPc)
3328 : HConstant(kNullConstant, DataType::Type::kReference, dex_pc) {
3329 }
3330
3331 friend class HGraph;
3332 };
3333
3334 // Constants of the type int. Those can be from Dex instructions, or
3335 // synthesized (for example with the if-eqz instruction).
3336 class HIntConstant final : public HConstant {
3337 public:
3338 int32_t GetValue() const { return value_; }
3339
3340 uint64_t GetValueAsUint64() const override {
3341 return static_cast<uint64_t>(static_cast<uint32_t>(value_));
3342 }
3343
3344 bool InstructionDataEquals(const HInstruction* other) const override {
3345 DCHECK(other->IsIntConstant()) << other->DebugName();
3346 return other->AsIntConstant()->value_ == value_;
3347 }
3348
3349 size_t ComputeHashCode() const override { return GetValue(); }
3350
3351 bool IsMinusOne() const override { return GetValue() == -1; }
3352 bool IsArithmeticZero() const override { return GetValue() == 0; }
3353 bool IsZeroBitPattern() const override { return GetValue() == 0; }
3354 bool IsOne() const override { return GetValue() == 1; }
3355
3356 // Integer constants are used to encode Boolean values as well,
3357 // where 1 means true and 0 means false.
3358 bool IsTrue() const { return GetValue() == 1; }
3359 bool IsFalse() const { return GetValue() == 0; }
3360
3361 DECLARE_INSTRUCTION(IntConstant);
3362
3363 protected:
3364 DEFAULT_COPY_CONSTRUCTOR(IntConstant);
3365
3366 private:
3367 explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
3368 : HConstant(kIntConstant, DataType::Type::kInt32, dex_pc), value_(value) {
3369 }
3370 explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc)
3371 : HConstant(kIntConstant, DataType::Type::kInt32, dex_pc),
3372 value_(value ? 1 : 0) {
3373 }
3374
3375 const int32_t value_;
3376
3377 friend class HGraph;
3378 ART_FRIEND_TEST(GraphTest, InsertInstructionBefore);
3379 ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast);
3380 };
3381
3382 class HLongConstant final : public HConstant {
3383 public:
3384 int64_t GetValue() const { return value_; }
3385
3386 uint64_t GetValueAsUint64() const override { return value_; }
3387
3388 bool InstructionDataEquals(const HInstruction* other) const override {
3389 DCHECK(other->IsLongConstant()) << other->DebugName();
3390 return other->AsLongConstant()->value_ == value_;
3391 }
3392
3393 size_t ComputeHashCode() const override { return static_cast<size_t>(GetValue()); }
3394
3395 bool IsMinusOne() const override { return GetValue() == -1; }
3396 bool IsArithmeticZero() const override { return GetValue() == 0; }
3397 bool IsZeroBitPattern() const override { return GetValue() == 0; }
3398 bool IsOne() const override { return GetValue() == 1; }
3399
3400 DECLARE_INSTRUCTION(LongConstant);
3401
3402 protected:
3403 DEFAULT_COPY_CONSTRUCTOR(LongConstant);
3404
3405 private:
3406 explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
3407 : HConstant(kLongConstant, DataType::Type::kInt64, dex_pc),
3408 value_(value) {
3409 }
3410
3411 const int64_t value_;
3412
3413 friend class HGraph;
3414 };
3415
3416 class HFloatConstant final : public HConstant {
3417 public:
3418 float GetValue() const { return value_; }
3419
3420 uint64_t GetValueAsUint64() const override {
3421 return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_));
3422 }
3423
3424 bool InstructionDataEquals(const HInstruction* other) const override {
3425 DCHECK(other->IsFloatConstant()) << other->DebugName();
3426 return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64();
3427 }
3428
3429 size_t ComputeHashCode() const override { return static_cast<size_t>(GetValue()); }
3430
3431 bool IsMinusOne() const override {
3432 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f));
3433 }
3434 bool IsArithmeticZero() const override {
3435 return std::fpclassify(value_) == FP_ZERO;
3436 }
3437 bool IsArithmeticPositiveZero() const {
3438 return IsArithmeticZero() && !std::signbit(value_);
3439 }
3440 bool IsArithmeticNegativeZero() const {
3441 return IsArithmeticZero() && std::signbit(value_);
3442 }
3443 bool IsZeroBitPattern() const override {
3444 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(0.0f);
3445 }
3446 bool IsOne() const override {
3447 return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f);
3448 }
3449 bool IsNaN() const {
3450 return std::isnan(value_);
3451 }
3452
3453 DECLARE_INSTRUCTION(FloatConstant);
3454
3455 protected:
3456 DEFAULT_COPY_CONSTRUCTOR(FloatConstant);
3457
3458 private:
3459 explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc)
3460 : HConstant(kFloatConstant, DataType::Type::kFloat32, dex_pc),
3461 value_(value) {
3462 }
3463 explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
3464 : HConstant(kFloatConstant, DataType::Type::kFloat32, dex_pc),
3465 value_(bit_cast<float, int32_t>(value)) {
3466 }
3467
3468 const float value_;
3469
3470 // Only the SsaBuilder and HGraph can create floating-point constants.
3471 friend class SsaBuilder;
3472 friend class HGraph;
3473 };
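// A short worked example of the zero predicates above (the behaviour follows directly from the
// code, nothing new): for a float constant holding -0.0f,
//
//   constant->IsArithmeticZero();          // true:  std::fpclassify(-0.0f) == FP_ZERO
//   constant->IsArithmeticNegativeZero();  // true:  zero with the sign bit set
//   constant->IsZeroBitPattern();          // false: the bit pattern is 0x80000000, not 0
//
// so a pass that needs an all-zero register encoding must check IsZeroBitPattern(), while one
// reasoning about arithmetic identities should use the arithmetic predicates.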
3474
3475 class HDoubleConstant final : public HConstant {
3476 public:
3477 double GetValue() const { return value_; }
3478
3479 uint64_t GetValueAsUint64() const override { return bit_cast<uint64_t, double>(value_); }
3480
3481 bool InstructionDataEquals(const HInstruction* other) const override {
3482 DCHECK(other->IsDoubleConstant()) << other->DebugName();
3483 return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64();
3484 }
3485
3486 size_t ComputeHashCode() const override { return static_cast<size_t>(GetValue()); }
3487
3488 bool IsMinusOne() const override {
3489 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0));
3490 }
3491 bool IsArithmeticZero() const override {
3492 return std::fpclassify(value_) == FP_ZERO;
3493 }
3494 bool IsArithmeticPositiveZero() const {
3495 return IsArithmeticZero() && !std::signbit(value_);
3496 }
3497 bool IsArithmeticNegativeZero() const {
3498 return IsArithmeticZero() && std::signbit(value_);
3499 }
3500 bool IsZeroBitPattern() const override {
3501 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((0.0));
3502 }
3503 bool IsOne() const override {
3504 return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0);
3505 }
3506 bool IsNaN() const {
3507 return std::isnan(value_);
3508 }
3509
3510 DECLARE_INSTRUCTION(DoubleConstant);
3511
3512 protected:
3513 DEFAULT_COPY_CONSTRUCTOR(DoubleConstant);
3514
3515 private:
3516 explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc)
3517 : HConstant(kDoubleConstant, DataType::Type::kFloat64, dex_pc),
3518 value_(value) {
3519 }
3520 explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
3521 : HConstant(kDoubleConstant, DataType::Type::kFloat64, dex_pc),
3522 value_(bit_cast<double, int64_t>(value)) {
3523 }
3524
3525 const double value_;
3526
3527 // Only the SsaBuilder and HGraph can create floating-point constants.
3528 friend class SsaBuilder;
3529 friend class HGraph;
3530 };
3531
3532 // Conditional branch. A block ending with an HIf instruction must have
3533 // two successors.
3534 class HIf final : public HExpression<1> {
3535 public:
3536 explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc)
3537 : HExpression(kIf, SideEffects::None(), dex_pc),
3538 true_count_(std::numeric_limits<uint16_t>::max()),
3539 false_count_(std::numeric_limits<uint16_t>::max()) {
3540 SetRawInputAt(0, input);
3541 }
3542
3543 bool IsClonable() const override { return true; }
3544 bool IsControlFlow() const override { return true; }
3545
3546 HBasicBlock* IfTrueSuccessor() const {
3547 return GetBlock()->GetSuccessors()[0];
3548 }
3549
3550 HBasicBlock* IfFalseSuccessor() const {
3551 return GetBlock()->GetSuccessors()[1];
3552 }
3553
3554 void SetTrueCount(uint16_t count) { true_count_ = count; }
3555 uint16_t GetTrueCount() const { return true_count_; }
3556
3557 void SetFalseCount(uint16_t count) { false_count_ = count; }
3558 uint16_t GetFalseCount() const { return false_count_; }
3559
3560 DECLARE_INSTRUCTION(If);
3561
3562 protected:
3563 DEFAULT_COPY_CONSTRUCTOR(If);
3564
3565 private:
3566 uint16_t true_count_;
3567 uint16_t false_count_;
3568 };
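// A minimal sketch of reading the branch targets off an HIf (illustrative only; `if_insn` is a
// hypothetical HIf* whose block already has both successors wired):
//
//   HBasicBlock* taken     = if_insn->IfTrueSuccessor();   // GetSuccessors()[0]
//   HBasicBlock* not_taken = if_insn->IfFalseSuccessor();  // GetSuccessors()[1]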
3569
3570
3571 // Abstract instruction which marks the beginning and/or end of a try block and
3572 // links it to the respective exception handlers. Behaves the same as a Goto in
3573 // non-exceptional control flow.
3574 // Normal-flow successor is stored at index zero, exception handlers under
3575 // higher indices in no particular order.
3576 class HTryBoundary final : public HExpression<0> {
3577 public:
3578 enum class BoundaryKind {
3579 kEntry,
3580 kExit,
3581 kLast = kExit
3582 };
3583
3584 // SideEffects::CanTriggerGC prevents instructions with SideEffects::DependOnGC from being
3585 // live across the edges entering a catch block, as GC might happen while an exception is
3586 // being thrown. TryBoundary with BoundaryKind::kExit is conservatively used for that, as
3587 // there is no HInstruction which a catch block must start from.
3588 explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc)
3589 : HExpression(kTryBoundary,
3590 (kind == BoundaryKind::kExit) ? SideEffects::CanTriggerGC()
3591 : SideEffects::None(),
3592 dex_pc) {
3593 SetPackedField<BoundaryKindField>(kind);
3594 }
3595
3596 bool IsControlFlow() const override { return true; }
3597
3598 // Returns the block's non-exceptional successor (index zero).
3599 HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; }
3600
3601 ArrayRef<HBasicBlock* const> GetExceptionHandlers() const {
3602 return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u);
3603 }
3604
3605 // Returns whether `handler` is among its exception handlers (non-zero index
3606 // successors).
3607 bool HasExceptionHandler(const HBasicBlock& handler) const {
3608 DCHECK(handler.IsCatchBlock());
3609 return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */);
3610 }
3611
3612 // If not present already, adds `handler` to its block's list of exception
3613 // handlers.
3614 void AddExceptionHandler(HBasicBlock* handler) {
3615 if (!HasExceptionHandler(*handler)) {
3616 GetBlock()->AddSuccessor(handler);
3617 }
3618 }
3619
3620 BoundaryKind GetBoundaryKind() const { return GetPackedField<BoundaryKindField>(); }
3621 bool IsEntry() const { return GetBoundaryKind() == BoundaryKind::kEntry; }
3622
3623 bool HasSameExceptionHandlersAs(const HTryBoundary& other) const;
3624
3625 DECLARE_INSTRUCTION(TryBoundary);
3626
3627 protected:
3628 DEFAULT_COPY_CONSTRUCTOR(TryBoundary);
3629
3630 private:
3631 static constexpr size_t kFieldBoundaryKind = kNumberOfGenericPackedBits;
3632 static constexpr size_t kFieldBoundaryKindSize =
3633 MinimumBitsToStore(static_cast<size_t>(BoundaryKind::kLast));
3634 static constexpr size_t kNumberOfTryBoundaryPackedBits =
3635 kFieldBoundaryKind + kFieldBoundaryKindSize;
3636 static_assert(kNumberOfTryBoundaryPackedBits <= kMaxNumberOfPackedBits,
3637 "Too many packed fields.");
3638 using BoundaryKindField = BitField<BoundaryKind, kFieldBoundaryKind, kFieldBoundaryKindSize>;
3639 };
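// A minimal sketch of walking an HTryBoundary's successors (illustrative only; `try_boundary`
// is a hypothetical HTryBoundary*): successor 0 is the normal-flow target, and every successor
// after it is an exception handler.
//
//   HBasicBlock* fallthrough = try_boundary->GetNormalFlowSuccessor();
//   for (HBasicBlock* handler : try_boundary->GetExceptionHandlers()) {
//     DCHECK(handler->IsCatchBlock());
//     // ... record or visit the handler ...
//   }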
3640
3641 // Deoptimize to interpreter, upon checking a condition.
3642 class HDeoptimize final : public HVariableInputSizeInstruction {
3643 public:
3644 // Use this constructor when the `HDeoptimize` acts as a barrier, across which no code
3645 // can move.
3646 HDeoptimize(ArenaAllocator* allocator,
3647 HInstruction* cond,
3648 DeoptimizationKind kind,
3649 uint32_t dex_pc)
3650 : HVariableInputSizeInstruction(
3651 kDeoptimize,
3652 SideEffects::All(),
3653 dex_pc,
3654 allocator,
3655 /* number_of_inputs= */ 1,
3656 kArenaAllocMisc) {
3657 SetPackedFlag<kFieldCanBeMoved>(false);
3658 SetPackedField<DeoptimizeKindField>(kind);
3659 SetRawInputAt(0, cond);
3660 }
3661
3662 bool IsClonable() const override { return true; }
3663
3664 // Use this constructor when the `HDeoptimize` guards an instruction, and any user
3665 // that relies on the deoptimization to pass should have its input be the `HDeoptimize`
3666 // instead of `guard`.
3667 // We set CanTriggerGC to prevent any intermediate address from being live
3668 // at the point of the `HDeoptimize`.
3669 HDeoptimize(ArenaAllocator* allocator,
3670 HInstruction* cond,
3671 HInstruction* guard,
3672 DeoptimizationKind kind,
3673 uint32_t dex_pc)
3674 : HVariableInputSizeInstruction(
3675 kDeoptimize,
3676 guard->GetType(),
3677 SideEffects::CanTriggerGC(),
3678 dex_pc,
3679 allocator,
3680 /* number_of_inputs= */ 2,
3681 kArenaAllocMisc) {
3682 SetPackedFlag<kFieldCanBeMoved>(true);
3683 SetPackedField<DeoptimizeKindField>(kind);
3684 SetRawInputAt(0, cond);
3685 SetRawInputAt(1, guard);
3686 }
3687
3688 bool CanBeMoved() const override { return GetPackedFlag<kFieldCanBeMoved>(); }
3689
3690 bool InstructionDataEquals(const HInstruction* other) const override {
3691 return (other->CanBeMoved() == CanBeMoved()) &&
3692 (other->AsDeoptimize()->GetDeoptimizationKind() == GetDeoptimizationKind());
3693 }
3694
3695 bool NeedsEnvironment() const override { return true; }
3696
3697 bool CanThrow() const override { return true; }
3698
3699 DeoptimizationKind GetDeoptimizationKind() const { return GetPackedField<DeoptimizeKindField>(); }
3700
3701 bool GuardsAnInput() const {
3702 return InputCount() == 2;
3703 }
3704
3705 HInstruction* GuardedInput() const {
3706 DCHECK(GuardsAnInput());
3707 return InputAt(1);
3708 }
3709
3710 void RemoveGuard() {
3711 RemoveInputAt(1);
3712 }
3713
3714 DECLARE_INSTRUCTION(Deoptimize);
3715
3716 protected:
3717 DEFAULT_COPY_CONSTRUCTOR(Deoptimize);
3718
3719 private:
3720 static constexpr size_t kFieldCanBeMoved = kNumberOfGenericPackedBits;
3721 static constexpr size_t kFieldDeoptimizeKind = kNumberOfGenericPackedBits + 1;
3722 static constexpr size_t kFieldDeoptimizeKindSize =
3723 MinimumBitsToStore(static_cast<size_t>(DeoptimizationKind::kLast));
3724 static constexpr size_t kNumberOfDeoptimizePackedBits =
3725 kFieldDeoptimizeKind + kFieldDeoptimizeKindSize;
3726 static_assert(kNumberOfDeoptimizePackedBits <= kMaxNumberOfPackedBits,
3727 "Too many packed fields.");
3728 using DeoptimizeKindField =
3729 BitField<DeoptimizationKind, kFieldDeoptimizeKind, kFieldDeoptimizeKindSize>;
3730 };
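// A rough sketch of the two HDeoptimize flavours (illustrative only; `allocator`, `cond`,
// `array`, `user` and `dex_pc` are hypothetical locals, and the DeoptimizationKind values are
// just examples):
//
//   // Barrier form: produces no value and nothing may move across it.
//   HDeoptimize* barrier =
//       new (allocator) HDeoptimize(allocator, cond, DeoptimizationKind::kCHA, dex_pc);
//
//   // Guarding form: takes the guarded value as a second input and produces its type, so a
//   // user that relies on the deoptimization passing is rewired to consume the HDeoptimize.
//   HDeoptimize* guard =
//       new (allocator) HDeoptimize(allocator, cond, array, DeoptimizationKind::kBlockBCE, dex_pc);
//   user->ReplaceInput(guard, /* index= */ 0);  // hypothetical rewiring of a user of `array`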
3731
3732 // Represents a should_deoptimize flag. Currently used for CHA-based devirtualization.
3733 // The compiled code checks this flag value in a guard before a devirtualized call and,
3734 // if the flag is true, deoptimizes.
3735 // It occupies a 4-byte slot on the stack.
3736 // TODO: allocate a register for this flag.
3737 class HShouldDeoptimizeFlag final : public HVariableInputSizeInstruction {
3738 public:
3739 // CHA guards are only optimized in a separate pass; with regard to other passes, this
3740 // instruction has no side effects.
3741 HShouldDeoptimizeFlag(ArenaAllocator* allocator, uint32_t dex_pc)
3742 : HVariableInputSizeInstruction(kShouldDeoptimizeFlag,
3743 DataType::Type::kInt32,
3744 SideEffects::None(),
3745 dex_pc,
3746 allocator,
3747 0,
3748 kArenaAllocCHA) {
3749 }
3750
3751 // We do all CHA guard elimination/motion in a single pass, after which there is no
3752 // further guard elimination/motion since a guard might have been used to justify
3753 // the elimination of another guard. Therefore, we pretend this guard cannot be moved
3754 // to avoid other optimizations trying to move it.
3755 bool CanBeMoved() const override { return false; }
3756
3757 DECLARE_INSTRUCTION(ShouldDeoptimizeFlag);
3758
3759 protected:
3760 DEFAULT_COPY_CONSTRUCTOR(ShouldDeoptimizeFlag);
3761 };
3762
3763 // Represents the ArtMethod that was passed as the first argument to
3764 // the method. It is used by instructions that depend on it, like
3765 // instructions that work with the dex cache.
3766 class HCurrentMethod final : public HExpression<0> {
3767 public:
3768 explicit HCurrentMethod(DataType::Type type, uint32_t dex_pc = kNoDexPc)
3769 : HExpression(kCurrentMethod, type, SideEffects::None(), dex_pc) {
3770 }
3771
3772 DECLARE_INSTRUCTION(CurrentMethod);
3773
3774 protected:
3775 DEFAULT_COPY_CONSTRUCTOR(CurrentMethod);
3776 };
3777
3778 // Fetches an ArtMethod from the virtual table or the interface method table
3779 // of a class.
3780 class HClassTableGet final : public HExpression<1> {
3781 public:
3782 enum class TableKind {
3783 kVTable,
3784 kIMTable,
3785 kLast = kIMTable
3786 };
3787 HClassTableGet(HInstruction* cls,
3788 DataType::Type type,
3789 TableKind kind,
3790 size_t index,
3791 uint32_t dex_pc)
3792 : HExpression(kClassTableGet, type, SideEffects::None(), dex_pc),
3793 index_(index) {
3794 SetPackedField<TableKindField>(kind);
3795 SetRawInputAt(0, cls);
3796 }
3797
3798 bool IsClonable() const override { return true; }
3799 bool CanBeMoved() const override { return true; }
3800 bool InstructionDataEquals(const HInstruction* other) const override {
3801 return other->AsClassTableGet()->GetIndex() == index_ &&
3802 other->AsClassTableGet()->GetPackedFields() == GetPackedFields();
3803 }
3804
3805 TableKind GetTableKind() const { return GetPackedField<TableKindField>(); }
3806 size_t GetIndex() const { return index_; }
3807
3808 DECLARE_INSTRUCTION(ClassTableGet);
3809
3810 protected:
3811 DEFAULT_COPY_CONSTRUCTOR(ClassTableGet);
3812
3813 private:
3814 static constexpr size_t kFieldTableKind = kNumberOfGenericPackedBits;
3815 static constexpr size_t kFieldTableKindSize =
3816 MinimumBitsToStore(static_cast<size_t>(TableKind::kLast));
3817 static constexpr size_t kNumberOfClassTableGetPackedBits = kFieldTableKind + kFieldTableKindSize;
3818 static_assert(kNumberOfClassTableGetPackedBits <= kMaxNumberOfPackedBits,
3819 "Too many packed fields.");
3820 using TableKindField = BitField<TableKind, kFieldTableKind, kFieldTableKindSize>;
3821
3822 // The index of the ArtMethod in the table.
3823 const size_t index_;
3824 };
3825
3826 // PackedSwitch (jump table). A block ending with a PackedSwitch instruction will
3827 // have one successor for each entry in the switch table, and the final successor
3828 // will be the block containing the next Dex opcode.
3829 class HPackedSwitch final : public HExpression<1> {
3830 public:
3831 HPackedSwitch(int32_t start_value,
3832 uint32_t num_entries,
3833 HInstruction* input,
3834 uint32_t dex_pc = kNoDexPc)
3835 : HExpression(kPackedSwitch, SideEffects::None(), dex_pc),
3836 start_value_(start_value),
3837 num_entries_(num_entries) {
3838 SetRawInputAt(0, input);
3839 }
3840
3841 bool IsClonable() const override { return true; }
3842
3843 bool IsControlFlow() const override { return true; }
3844
3845 int32_t GetStartValue() const { return start_value_; }
3846
3847 uint32_t GetNumEntries() const { return num_entries_; }
3848
3849 HBasicBlock* GetDefaultBlock() const {
3850 // Last entry is the default block.
3851 return GetBlock()->GetSuccessors()[num_entries_];
3852 }
3853 DECLARE_INSTRUCTION(PackedSwitch);
3854
3855 protected:
3856 DEFAULT_COPY_CONSTRUCTOR(PackedSwitch);
3857
3858 private:
3859 const int32_t start_value_;
3860 const uint32_t num_entries_;
3861 };
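// A minimal sketch of mapping switch cases to successors (illustrative only; `switch_insn` is a
// hypothetical HPackedSwitch*): successor i handles case value start_value + i, and successor
// num_entries is the default/fall-through block.
//
//   for (uint32_t i = 0; i < switch_insn->GetNumEntries(); ++i) {
//     int32_t case_value = switch_insn->GetStartValue() + static_cast<int32_t>(i);
//     HBasicBlock* target = switch_insn->GetBlock()->GetSuccessors()[i];
//     // ... emit a compare/branch or jump-table entry for (case_value, target) ...
//   }
//   HBasicBlock* default_block = switch_insn->GetDefaultBlock();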
3862
3863 class HUnaryOperation : public HExpression<1> {
3864 public:
3865 HUnaryOperation(InstructionKind kind,
3866 DataType::Type result_type,
3867 HInstruction* input,
3868 uint32_t dex_pc = kNoDexPc)
3869 : HExpression(kind, result_type, SideEffects::None(), dex_pc) {
3870 SetRawInputAt(0, input);
3871 }
3872
3873 // All of the UnaryOperation instructions are clonable.
3874 bool IsClonable() const override { return true; }
3875
3876 HInstruction* GetInput() const { return InputAt(0); }
3877 DataType::Type GetResultType() const { return GetType(); }
3878
3879 bool CanBeMoved() const override { return true; }
3880 bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
3881 return true;
3882 }
3883
3884 // Try to statically evaluate `this` and return a HConstant
3885 // containing the result of this evaluation. If `this` cannot
3886 // be evaluated as a constant, return null.
3887 HConstant* TryStaticEvaluation() const;
3888
3889 // Same but for `input` instead of GetInput().
3890 HConstant* TryStaticEvaluation(HInstruction* input) const;
3891
3892 // Apply this operation to `x`.
3893 virtual HConstant* Evaluate([[maybe_unused]] HIntConstant* x) const {
3894 LOG(FATAL) << DebugName() << " is not defined for int values";
3895 UNREACHABLE();
3896 }
3897 virtual HConstant* Evaluate([[maybe_unused]] HLongConstant* x) const {
3898 LOG(FATAL) << DebugName() << " is not defined for long values";
3899 UNREACHABLE();
3900 }
3901 virtual HConstant* Evaluate([[maybe_unused]] HFloatConstant* x) const {
3902 LOG(FATAL) << DebugName() << " is not defined for float values";
3903 UNREACHABLE();
3904 }
3905 virtual HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x) const {
3906 LOG(FATAL) << DebugName() << " is not defined for double values";
3907 UNREACHABLE();
3908 }
3909
3910 DECLARE_ABSTRACT_INSTRUCTION(UnaryOperation);
3911
3912 protected:
3913 DEFAULT_COPY_CONSTRUCTOR(UnaryOperation);
3914 };
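// A minimal sketch of how a constant-folding style pass would use TryStaticEvaluation()
// (illustrative only, not a verbatim excerpt from the actual pass; it assumes the usual
// ReplaceWith() and RemoveInstruction() helpers):
//
//   if (HConstant* folded = unary_op->TryStaticEvaluation()) {
//     unary_op->ReplaceWith(folded);                      // all users now consume the constant
//     unary_op->GetBlock()->RemoveInstruction(unary_op);  // the original op is dead
//   }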
3915
3916 class HBinaryOperation : public HExpression<2> {
3917 public:
3918 HBinaryOperation(InstructionKind kind,
3919 DataType::Type result_type,
3920 HInstruction* left,
3921 HInstruction* right,
3922 SideEffects side_effects = SideEffects::None(),
3923 uint32_t dex_pc = kNoDexPc)
3924 : HExpression(kind, result_type, side_effects, dex_pc) {
3925 SetRawInputAt(0, left);
3926 SetRawInputAt(1, right);
3927 }
3928
3929 // All of the BinaryOperation instructions are clonable.
3930 bool IsClonable() const override { return true; }
3931
3932 HInstruction* GetLeft() const { return InputAt(0); }
3933 HInstruction* GetRight() const { return InputAt(1); }
3934 DataType::Type GetResultType() const { return GetType(); }
3935
3936 virtual bool IsCommutative() const { return false; }
3937
3938 // Put constant on the right.
3939 // Returns whether order is changed.
3940 bool OrderInputsWithConstantOnTheRight() {
3941 HInstruction* left = InputAt(0);
3942 HInstruction* right = InputAt(1);
3943 if (left->IsConstant() && !right->IsConstant()) {
3944 ReplaceInput(right, 0);
3945 ReplaceInput(left, 1);
3946 return true;
3947 }
3948 return false;
3949 }
3950
3951 // Order inputs by instruction id, but favor constant on the right side.
3952 // This helps GVN for commutative ops.
3953 void OrderInputs() {
3954 DCHECK(IsCommutative());
3955 HInstruction* left = InputAt(0);
3956 HInstruction* right = InputAt(1);
3957 if (left == right || (!left->IsConstant() && right->IsConstant())) {
3958 return;
3959 }
3960 if (OrderInputsWithConstantOnTheRight()) {
3961 return;
3962 }
3963 // Order according to instruction id.
3964 if (left->GetId() > right->GetId()) {
3965 ReplaceInput(right, 0);
3966 ReplaceInput(left, 1);
3967 }
3968 }
3969
3970 bool CanBeMoved() const override { return true; }
3971 bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
3972 return true;
3973 }
3974
3975 // Try to statically evaluate `this` and return a HConstant
3976 // containing the result of this evaluation. If `this` cannot
3977 // be evaluated as a constant, return null.
3978 HConstant* TryStaticEvaluation() const;
3979
3980 // Same but for `left` and `right` instead of GetLeft() and GetRight().
3981 HConstant* TryStaticEvaluation(HInstruction* left, HInstruction* right) const;
3982
3983 // Apply this operation to `x` and `y`.
3984 virtual HConstant* Evaluate([[maybe_unused]] HNullConstant* x,
3985 [[maybe_unused]] HNullConstant* y) const {
3986 LOG(FATAL) << DebugName() << " is not defined for the (null, null) case.";
3987 UNREACHABLE();
3988 }
3989 virtual HConstant* Evaluate([[maybe_unused]] HIntConstant* x,
3990 [[maybe_unused]] HIntConstant* y) const {
3991 LOG(FATAL) << DebugName() << " is not defined for the (int, int) case.";
3992 UNREACHABLE();
3993 }
3994 virtual HConstant* Evaluate([[maybe_unused]] HLongConstant* x,
3995 [[maybe_unused]] HLongConstant* y) const {
3996 LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
3997 UNREACHABLE();
3998 }
3999 virtual HConstant* Evaluate([[maybe_unused]] HLongConstant* x,
4000 [[maybe_unused]] HIntConstant* y) const {
4001 LOG(FATAL) << DebugName() << " is not defined for the (long, int) case.";
4002 UNREACHABLE();
4003 }
4004 virtual HConstant* Evaluate([[maybe_unused]] HFloatConstant* x,
4005 [[maybe_unused]] HFloatConstant* y) const {
4006 LOG(FATAL) << DebugName() << " is not defined for float values";
4007 UNREACHABLE();
4008 }
4009 virtual HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x,
4010 [[maybe_unused]] HDoubleConstant* y) const {
4011 LOG(FATAL) << DebugName() << " is not defined for double values";
4012 UNREACHABLE();
4013 }
4014
4015 // Returns an input that can legally be used as the right input and is
4016 // constant, or null.
4017 HConstant* GetConstantRight() const;
4018
4019 // If `GetConstantRight()` returns one of the inputs, this returns the other
4020 // one. Otherwise it returns null.
4021 HInstruction* GetLeastConstantLeft() const;
4022
4023 DECLARE_ABSTRACT_INSTRUCTION(BinaryOperation);
4024
4025 protected:
4026 DEFAULT_COPY_CONSTRUCTOR(BinaryOperation);
4027 };
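// A minimal sketch of the canonicalization helpers above (illustrative only; `binop` is a
// hypothetical HBinaryOperation*): GVN benefits from a deterministic operand order for
// commutative operations, so `add(const, x)` and `add(x, const)` end up identical after
// OrderInputs().
//
//   if (binop->IsCommutative()) {
//     binop->OrderInputs();  // any constant moves to the right, otherwise order by id
//   }
//   HConstant* constant_rhs = binop->GetConstantRight();        // may be null
//   HInstruction* other_operand = binop->GetLeastConstantLeft();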
4028
4029 // The comparison bias applies for floating point operations and indicates how NaN
4030 // comparisons are treated:
4031 enum class ComparisonBias {  // private marker to keep generate-operator-out.py from processing this.
4032 kNoBias,  // bias is not applicable (i.e. for long operations)
4033 kGtBias, // return 1 for NaN comparisons
4034 kLtBias, // return -1 for NaN comparisons
4035 kLast = kLtBias
4036 };
4037
4038 std::ostream& operator<<(std::ostream& os, ComparisonBias rhs);
4039
4040 class HCondition : public HBinaryOperation {
4041 public:
4042 HCondition(InstructionKind kind,
4043 HInstruction* first,
4044 HInstruction* second,
4045 uint32_t dex_pc = kNoDexPc)
4046 : HBinaryOperation(kind,
4047 DataType::Type::kBool,
4048 first,
4049 second,
4050 SideEffects::None(),
4051 dex_pc) {
4052 SetPackedField<ComparisonBiasField>(ComparisonBias::kNoBias);
4053 }
4054
4055 // For code generation purposes, returns whether this instruction is just before
4056 // `instruction`, disregarding moves in between.
4057 bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const;
4058
4059 DECLARE_ABSTRACT_INSTRUCTION(Condition);
4060
4061 virtual IfCondition GetCondition() const = 0;
4062
4063 virtual IfCondition GetOppositeCondition() const = 0;
4064
4065 bool IsGtBias() const { return GetBias() == ComparisonBias::kGtBias; }
4066 bool IsLtBias() const { return GetBias() == ComparisonBias::kLtBias; }
4067
4068 ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
4069 void SetBias(ComparisonBias bias) { SetPackedField<ComparisonBiasField>(bias); }
4070
4071 bool InstructionDataEquals(const HInstruction* other) const override {
4072 return GetPackedFields() == other->AsCondition()->GetPackedFields();
4073 }
4074
4075 bool IsFPConditionTrueIfNaN() const {
4076 DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
4077 IfCondition if_cond = GetCondition();
4078 if (if_cond == kCondNE) {
4079 return true;
4080 } else if (if_cond == kCondEQ) {
4081 return false;
4082 }
4083 return ((if_cond == kCondGT) || (if_cond == kCondGE)) && IsGtBias();
4084 }
4085
4086 bool IsFPConditionFalseIfNaN() const {
4087 DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
4088 IfCondition if_cond = GetCondition();
4089 if (if_cond == kCondEQ) {
4090 return true;
4091 } else if (if_cond == kCondNE) {
4092 return false;
4093 }
4094 return ((if_cond == kCondLT) || (if_cond == kCondLE)) && IsGtBias();
4095 }
4096
4097 protected:
4098 // Needed if we merge a HCompare into a HCondition.
4099 static constexpr size_t kFieldComparisonBias = kNumberOfGenericPackedBits;
4100 static constexpr size_t kFieldComparisonBiasSize =
4101 MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
4102 static constexpr size_t kNumberOfConditionPackedBits =
4103 kFieldComparisonBias + kFieldComparisonBiasSize;
4104 static_assert(kNumberOfConditionPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
4105 using ComparisonBiasField =
4106 BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;
4107
4108 template <typename T>
4109 int32_t Compare(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }
4110
4111 template <typename T>
4112 int32_t CompareFP(T x, T y) const {
4113 DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
4114 DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
4115 // Handle the bias.
4116 return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compare(x, y);
4117 }
4118
4119 // Return an integer constant containing the result of a condition evaluated at compile time.
4120 HIntConstant* MakeConstantCondition(bool value, uint32_t dex_pc) const {
4121 return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
4122 }
4123
4124 DEFAULT_COPY_CONSTRUCTOR(Condition);
4125 };
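// A short worked example of the bias handling in CompareFP() above (the behaviour follows from
// the code, nothing new): with x = 1.0f and y = NaN the operands are unordered, so
//
//   CompareFP(x, y) == 1    when the bias is kGtBias (dex cmpg-float semantics), and
//   CompareFP(x, y) == -1   when the bias is kLtBias (dex cmpl-float semantics),
//
// which is how a condition that absorbed an HCompare still honours the original dex NaN
// behaviour.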
4126
4127 // Instruction to check if two inputs are equal to each other.
4128 class HEqual final : public HCondition {
4129 public:
4130 HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
4131 : HCondition(kEqual, first, second, dex_pc) {
4132 }
4133
4134 bool IsCommutative() const override { return true; }
4135
4136 HConstant* Evaluate([[maybe_unused]] HNullConstant* x,
4137 [[maybe_unused]] HNullConstant* y) const override {
4138 return MakeConstantCondition(true, GetDexPc());
4139 }
4140 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4141 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4142 }
4143 // In the following Evaluate methods, a HCompare instruction has
4144 // been merged into this HEqual instruction; evaluate it as
4145 // `Compare(x, y) == 0`.
4146 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4147 return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0),
4148 GetDexPc());
4149 }
4150 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
4151 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
4152 }
4153 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
4154 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
4155 }
4156
4157 DECLARE_INSTRUCTION(Equal);
4158
4159 IfCondition GetCondition() const override {
4160 return kCondEQ;
4161 }
4162
4163 IfCondition GetOppositeCondition() const override {
4164 return kCondNE;
4165 }
4166
4167 protected:
4168 DEFAULT_COPY_CONSTRUCTOR(Equal);
4169
4170 private:
4171 template <typename T> static bool Compute(T x, T y) { return x == y; }
4172 };
4173
4174 class HNotEqual final : public HCondition {
4175 public:
4176 HNotEqual(HInstruction* first, HInstruction* second,
4177 uint32_t dex_pc = kNoDexPc)
4178 : HCondition(kNotEqual, first, second, dex_pc) {
4179 }
4180
4181 bool IsCommutative() const override { return true; }
4182
4183 HConstant* Evaluate([[maybe_unused]] HNullConstant* x,
4184 [[maybe_unused]] HNullConstant* y) const override {
4185 return MakeConstantCondition(false, GetDexPc());
4186 }
4187 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4188 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4189 }
4190 // In the following Evaluate methods, a HCompare instruction has
4191 // been merged into this HNotEqual instruction; evaluate it as
4192 // `Compare(x, y) != 0`.
4193 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4194 return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
4195 }
4196 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
4197 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
4198 }
4199 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
4200 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
4201 }
4202
4203 DECLARE_INSTRUCTION(NotEqual);
4204
4205 IfCondition GetCondition() const override {
4206 return kCondNE;
4207 }
4208
4209 IfCondition GetOppositeCondition() const override {
4210 return kCondEQ;
4211 }
4212
4213 protected:
4214 DEFAULT_COPY_CONSTRUCTOR(NotEqual);
4215
4216 private:
4217 template <typename T> static bool Compute(T x, T y) { return x != y; }
4218 };
4219
4220 class HLessThan final : public HCondition {
4221 public:
4222 HLessThan(HInstruction* first, HInstruction* second,
4223 uint32_t dex_pc = kNoDexPc)
4224 : HCondition(kLessThan, first, second, dex_pc) {
4225 }
4226
4227 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4228 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4229 }
4230 // In the following Evaluate methods, a HCompare instruction has
4231 // been merged into this HLessThan instruction; evaluate it as
4232 // `Compare(x, y) < 0`.
4233 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4234 return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
4235 }
4236 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
4237 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
4238 }
4239 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
4240 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
4241 }
4242
4243 DECLARE_INSTRUCTION(LessThan);
4244
4245 IfCondition GetCondition() const override {
4246 return kCondLT;
4247 }
4248
4249 IfCondition GetOppositeCondition() const override {
4250 return kCondGE;
4251 }
4252
4253 protected:
4254 DEFAULT_COPY_CONSTRUCTOR(LessThan);
4255
4256 private:
4257 template <typename T> static bool Compute(T x, T y) { return x < y; }
4258 };
4259
4260 class HLessThanOrEqual final : public HCondition {
4261 public:
4262 HLessThanOrEqual(HInstruction* first, HInstruction* second,
4263 uint32_t dex_pc = kNoDexPc)
4264 : HCondition(kLessThanOrEqual, first, second, dex_pc) {
4265 }
4266
4267 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4268 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4269 }
4270 // In the following Evaluate methods, a HCompare instruction has
4271 // been merged into this HLessThanOrEqual instruction; evaluate it as
4272 // `Compare(x, y) <= 0`.
4273 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4274 return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
4275 }
4276 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
4277 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
4278 }
4279 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
4280 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
4281 }
4282
4283 DECLARE_INSTRUCTION(LessThanOrEqual);
4284
4285 IfCondition GetCondition() const override {
4286 return kCondLE;
4287 }
4288
4289 IfCondition GetOppositeCondition() const override {
4290 return kCondGT;
4291 }
4292
4293 protected:
4294 DEFAULT_COPY_CONSTRUCTOR(LessThanOrEqual);
4295
4296 private:
4297 template <typename T> static bool Compute(T x, T y) { return x <= y; }
4298 };
4299
4300 class HGreaterThan final : public HCondition {
4301 public:
4302 HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
4303 : HCondition(kGreaterThan, first, second, dex_pc) {
4304 }
4305
4306 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4307 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4308 }
4309 // In the following Evaluate methods, a HCompare instruction has
4310 // been merged into this HGreaterThan instruction; evaluate it as
4311 // `Compare(x, y) > 0`.
4312 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4313 return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
4314 }
4315 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
4316 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
4317 }
4318 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
4319 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
4320 }
4321
4322 DECLARE_INSTRUCTION(GreaterThan);
4323
4324 IfCondition GetCondition() const override {
4325 return kCondGT;
4326 }
4327
4328 IfCondition GetOppositeCondition() const override {
4329 return kCondLE;
4330 }
4331
4332 protected:
4333 DEFAULT_COPY_CONSTRUCTOR(GreaterThan);
4334
4335 private:
4336 template <typename T> static bool Compute(T x, T y) { return x > y; }
4337 };
4338
4339 class HGreaterThanOrEqual final : public HCondition {
4340 public:
4341 HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
4342 : HCondition(kGreaterThanOrEqual, first, second, dex_pc) {
4343 }
4344
4345 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4346 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4347 }
4348 // In the following Evaluate methods, a HCompare instruction has
4349 // been merged into this HGreaterThanOrEqual instruction; evaluate it as
4350 // `Compare(x, y) >= 0`.
4351 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4352 return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
4353 }
4354 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
4355 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
4356 }
4357 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
4358 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
4359 }
4360
4361 DECLARE_INSTRUCTION(GreaterThanOrEqual);
4362
4363 IfCondition GetCondition() const override {
4364 return kCondGE;
4365 }
4366
4367 IfCondition GetOppositeCondition() const override {
4368 return kCondLT;
4369 }
4370
4371 protected:
4372 DEFAULT_COPY_CONSTRUCTOR(GreaterThanOrEqual);
4373
4374 private:
4375 template <typename T> static bool Compute(T x, T y) { return x >= y; }
4376 };
4377
4378 class HBelow final : public HCondition {
4379 public:
4380 HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
4381 : HCondition(kBelow, first, second, dex_pc) {
4382 }
4383
4384 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4385 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4386 }
4387 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4388 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4389 }
4390
4391 DECLARE_INSTRUCTION(Below);
4392
4393 IfCondition GetCondition() const override {
4394 return kCondB;
4395 }
4396
4397 IfCondition GetOppositeCondition() const override {
4398 return kCondAE;
4399 }
4400
4401 protected:
4402 DEFAULT_COPY_CONSTRUCTOR(Below);
4403
4404 private:
4405 template <typename T> static bool Compute(T x, T y) {
4406 return MakeUnsigned(x) < MakeUnsigned(y);
4407 }
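  // Illustrative note (added commentary, not from the original source): the operands are
  // reinterpreted as unsigned before comparing, so for int32 inputs Compute(-1, 0) is false
  // (0xFFFFFFFF is not below 0) while Compute(0, -1) is true.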
4408 };
4409
4410 class HBelowOrEqual final : public HCondition {
4411 public:
4412 HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
4413 : HCondition(kBelowOrEqual, first, second, dex_pc) {
4414 }
4415
4416 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4417 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4418 }
4419 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4420 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4421 }
4422
4423 DECLARE_INSTRUCTION(BelowOrEqual);
4424
4425 IfCondition GetCondition() const override {
4426 return kCondBE;
4427 }
4428
4429 IfCondition GetOppositeCondition() const override {
4430 return kCondA;
4431 }
4432
4433 protected:
4434 DEFAULT_COPY_CONSTRUCTOR(BelowOrEqual);
4435
4436 private:
4437 template <typename T> static bool Compute(T x, T y) {
4438 return MakeUnsigned(x) <= MakeUnsigned(y);
4439 }
4440 };
4441
4442 class HAbove final : public HCondition {
4443 public:
4444 HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
4445 : HCondition(kAbove, first, second, dex_pc) {
4446 }
4447
4448 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4449 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4450 }
4451 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4452 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4453 }
4454
4455 DECLARE_INSTRUCTION(Above);
4456
4457 IfCondition GetCondition() const override {
4458 return kCondA;
4459 }
4460
4461 IfCondition GetOppositeCondition() const override {
4462 return kCondBE;
4463 }
4464
4465 protected:
4466 DEFAULT_COPY_CONSTRUCTOR(Above);
4467
4468 private:
4469 template <typename T> static bool Compute(T x, T y) {
4470 return MakeUnsigned(x) > MakeUnsigned(y);
4471 }
4472 };
4473
4474 class HAboveOrEqual final : public HCondition {
4475 public:
4476 HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
4477 : HCondition(kAboveOrEqual, first, second, dex_pc) {
4478 }
4479
4480 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4481 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4482 }
4483 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4484 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4485 }
4486
4487 DECLARE_INSTRUCTION(AboveOrEqual);
4488
4489 IfCondition GetCondition() const override {
4490 return kCondAE;
4491 }
4492
4493 IfCondition GetOppositeCondition() const override {
4494 return kCondB;
4495 }
4496
4497 protected:
4498 DEFAULT_COPY_CONSTRUCTOR(AboveOrEqual);
4499
4500 private:
4501 template <typename T> static bool Compute(T x, T y) {
4502 return MakeUnsigned(x) >= MakeUnsigned(y);
4503 }
4504 };
4505
4506 // Instruction to check how two inputs compare to each other.
4507 // Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1.
4508 class HCompare final : public HBinaryOperation {
4509 public:
4510 // Note that `comparison_type` is the type of comparison performed
4511 // between the comparison's inputs, not the type of the instantiated
  // HCompare instruction (which is always DataType::Type::kInt32).
4513 HCompare(DataType::Type comparison_type,
4514 HInstruction* first,
4515 HInstruction* second,
4516 ComparisonBias bias,
4517 uint32_t dex_pc)
4518 : HBinaryOperation(kCompare,
4519 DataType::Type::kInt32,
4520 first,
4521 second,
4522 SideEffectsForArchRuntimeCalls(comparison_type),
4523 dex_pc) {
4524 SetPackedField<ComparisonBiasField>(bias);
4525 }
4526
4527 template <typename T>
4528 int32_t Compute(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }
4529
4530 template <typename T>
4531 int32_t ComputeFP(T x, T y) const {
4532 DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
4533 DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
4534 // Handle the bias.
4535 return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compute(x, y);
4536 }
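  // Illustrative note (added commentary, not from the original source): the bias only matters
  // when the comparison is unordered, i.e. when at least one input is NaN. For example,
  // ComputeFP(NaN, 0.0f) yields 1 under kGtBias and -1 under kLtBias; ordered inputs fall
  // through to Compute() and ignore the bias.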
4537
4538 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
4539 // Note that there is no "cmp-int" Dex instruction so we shouldn't
4540 // reach this code path when processing a freshly built HIR
4541 // graph. However HCompare integer instructions can be synthesized
4542 // by the instruction simplifier to implement IntegerCompare and
4543 // IntegerSignum intrinsics, so we have to handle this case.
4544 return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4545 }
4546 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
4547 return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
4548 }
4549 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
4550 return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4551 }
4552 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
4553 return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4554 }
4555
4556 bool InstructionDataEquals(const HInstruction* other) const override {
4557 return GetPackedFields() == other->AsCompare()->GetPackedFields();
4558 }
4559
4560 ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
4561
4562 // Does this compare instruction have a "gt bias" (vs an "lt bias")?
4563 // Only meaningful for floating-point comparisons.
4564 bool IsGtBias() const {
4565 DCHECK(DataType::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
4566 return GetBias() == ComparisonBias::kGtBias;
4567 }
4568
4569 static SideEffects SideEffectsForArchRuntimeCalls([[maybe_unused]] DataType::Type type) {
4570 // Comparisons do not require a runtime call in any back end.
4571 return SideEffects::None();
4572 }
4573
4574 DECLARE_INSTRUCTION(Compare);
4575
4576 protected:
4577 static constexpr size_t kFieldComparisonBias = kNumberOfGenericPackedBits;
4578 static constexpr size_t kFieldComparisonBiasSize =
4579 MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
4580 static constexpr size_t kNumberOfComparePackedBits =
4581 kFieldComparisonBias + kFieldComparisonBiasSize;
4582 static_assert(kNumberOfComparePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
4583 using ComparisonBiasField =
4584 BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;
4585
4586 // Return an integer constant containing the result of a comparison evaluated at compile time.
4587 HIntConstant* MakeConstantComparison(int32_t value, uint32_t dex_pc) const {
4588 DCHECK(value == -1 || value == 0 || value == 1) << value;
4589 return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
4590 }
4591
4592 DEFAULT_COPY_CONSTRUCTOR(Compare);
4593 };
4594
4595 class HNewInstance final : public HExpression<1> {
4596 public:
4597 HNewInstance(HInstruction* cls,
4598 uint32_t dex_pc,
4599 dex::TypeIndex type_index,
4600 const DexFile& dex_file,
4601 bool finalizable,
4602 QuickEntrypointEnum entrypoint)
4603 : HExpression(kNewInstance,
4604 DataType::Type::kReference,
4605 SideEffects::CanTriggerGC(),
4606 dex_pc),
4607 type_index_(type_index),
4608 dex_file_(dex_file),
4609 entrypoint_(entrypoint) {
4610 SetPackedFlag<kFlagFinalizable>(finalizable);
4611 SetPackedFlag<kFlagPartialMaterialization>(false);
4612 SetRawInputAt(0, cls);
4613 }
4614
4615 bool IsClonable() const override { return true; }
4616
4617 void SetPartialMaterialization() {
4618 SetPackedFlag<kFlagPartialMaterialization>(true);
4619 }
4620
4621 dex::TypeIndex GetTypeIndex() const { return type_index_; }
4622 const DexFile& GetDexFile() const { return dex_file_; }
4623
4624 // Calls runtime so needs an environment.
4625 bool NeedsEnvironment() const override { return true; }
4626
  // Can throw an error when out of memory or if the class is not instantiable/accessible.
4628 bool CanThrow() const override { return true; }
4629 bool OnlyThrowsAsyncExceptions() const override {
4630 return !IsFinalizable() && !NeedsChecks();
4631 }
4632
4633 bool NeedsChecks() const {
4634 return entrypoint_ == kQuickAllocObjectWithChecks;
4635 }
4636
4637 bool IsFinalizable() const { return GetPackedFlag<kFlagFinalizable>(); }
4638
4639 bool CanBeNull() const override { return false; }
4640
4641 bool IsPartialMaterialization() const {
4642 return GetPackedFlag<kFlagPartialMaterialization>();
4643 }
4644
4645 QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }
4646
4647 void SetEntrypoint(QuickEntrypointEnum entrypoint) {
4648 entrypoint_ = entrypoint;
4649 }
4650
4651 HLoadClass* GetLoadClass() const {
4652 HInstruction* input = InputAt(0);
4653 if (input->IsClinitCheck()) {
4654 input = input->InputAt(0);
4655 }
4656 DCHECK(input->IsLoadClass());
4657 return input->AsLoadClass();
4658 }
4659
4660 bool IsStringAlloc() const;
4661
4662 DECLARE_INSTRUCTION(NewInstance);
4663
4664 protected:
4665 DEFAULT_COPY_CONSTRUCTOR(NewInstance);
4666
4667 private:
4668 static constexpr size_t kFlagFinalizable = kNumberOfGenericPackedBits;
4669 static constexpr size_t kFlagPartialMaterialization = kFlagFinalizable + 1;
4670 static constexpr size_t kNumberOfNewInstancePackedBits = kFlagPartialMaterialization + 1;
4671 static_assert(kNumberOfNewInstancePackedBits <= kMaxNumberOfPackedBits,
4672 "Too many packed fields.");
4673
4674 const dex::TypeIndex type_index_;
4675 const DexFile& dex_file_;
4676 QuickEntrypointEnum entrypoint_;
4677 };
4678
4679 enum IntrinsicNeedsEnvironment {
4680 kNoEnvironment, // Intrinsic does not require an environment.
4681 kNeedsEnvironment // Intrinsic requires an environment.
4682 };
4683
4684 enum IntrinsicSideEffects {
4685 kNoSideEffects, // Intrinsic does not have any heap memory side effects.
4686 kReadSideEffects, // Intrinsic may read heap memory.
4687 kWriteSideEffects, // Intrinsic may write heap memory.
4688 kAllSideEffects // Intrinsic may read or write heap memory, or trigger GC.
4689 };
4690
4691 enum IntrinsicExceptions {
4692 kNoThrow, // Intrinsic does not throw any exceptions.
4693 kCanThrow // Intrinsic may throw exceptions.
4694 };
4695
4696 // Determines how to load an ArtMethod*.
4697 enum class MethodLoadKind {
4698 // Use a String init ArtMethod* loaded from Thread entrypoints.
4699 kStringInit,
4700
4701 // Use the method's own ArtMethod* loaded by the register allocator.
4702 kRecursive,
4703
4704 // Use PC-relative boot image ArtMethod* address that will be known at link time.
4705 // Used for boot image methods referenced by boot image code.
4706 kBootImageLinkTimePcRelative,
4707
4708 // Load from a boot image entry in the .data.img.rel.ro using a PC-relative load.
4709 // Used for app->boot calls with relocatable image.
4710 kBootImageRelRo,
4711
4712 // Load from an entry in the .bss section using a PC-relative load.
4713 // Used for methods outside boot image referenced by AOT-compiled app and boot image code.
4714 kBssEntry,
4715
4716 // Use ArtMethod* at a known address, embed the direct address in the code.
  // Used for JIT-compiled calls.
4718 kJitDirectAddress,
4719
  // Make a runtime call to resolve and call the method. This is the last-resort kind
  // used when other kinds are unimplemented on a particular architecture.
4722 kRuntimeCall,
4723 };
4724
4725 // Determines the location of the code pointer of an invoke.
4726 enum class CodePtrLocation {
4727 // Recursive call, use local PC-relative call instruction.
4728 kCallSelf,
4729
  // Use the native pointer from the ArtMethod*.
4731 // Used for @CriticalNative to avoid going through the compiled stub. This call goes through
4732 // a special resolution stub if the class is not initialized or no native code is registered.
4733 kCallCriticalNative,
4734
4735 // Use code pointer from the ArtMethod*.
  // Used when we don't know the target code. This is also the last-resort kind used when
  // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture.
4738 kCallArtMethod,
4739 };
4740
4741 static inline bool IsPcRelativeMethodLoadKind(MethodLoadKind load_kind) {
4742 return load_kind == MethodLoadKind::kBootImageLinkTimePcRelative ||
4743 load_kind == MethodLoadKind::kBootImageRelRo ||
4744 load_kind == MethodLoadKind::kBssEntry;
4745 }
4746
4747 class HInvoke : public HVariableInputSizeInstruction {
4748 public:
4749 bool NeedsEnvironment() const override;
4750
4751 void SetArgumentAt(size_t index, HInstruction* argument) {
4752 SetRawInputAt(index, argument);
4753 }
4754
4755 // Return the number of arguments. This number can be lower than
4756 // the number of inputs returned by InputCount(), as some invoke
4757 // instructions (e.g. HInvokeStaticOrDirect) can have non-argument
4758 // inputs at the end of their list of inputs.
4759 uint32_t GetNumberOfArguments() const { return number_of_arguments_; }
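  // Illustrative example (an assumption about typical use, not original commentary):
  // an HInvokeStaticOrDirect with an explicit clinit check reports
  // GetNumberOfArguments() == N while InputCount() == N + 1 (or N + 2 if the
  // HCurrentMethod input is also present), so code iterating over arguments should use
  // GetNumberOfArguments() rather than InputCount().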
4760
4761 InvokeType GetInvokeType() const {
4762 return GetPackedField<InvokeTypeField>();
4763 }
4764
4765 Intrinsics GetIntrinsic() const {
4766 return intrinsic_;
4767 }
4768
4769 void SetIntrinsic(Intrinsics intrinsic,
4770 IntrinsicNeedsEnvironment needs_env,
4771 IntrinsicSideEffects side_effects,
4772 IntrinsicExceptions exceptions);
4773
4774 bool IsFromInlinedInvoke() const {
4775 return GetEnvironment()->IsFromInlinedInvoke();
4776 }
4777
4778 void SetCanThrow(bool can_throw) { SetPackedFlag<kFlagCanThrow>(can_throw); }
4779
4780 bool CanThrow() const override { return GetPackedFlag<kFlagCanThrow>(); }
4781
4782 void SetAlwaysThrows(bool always_throws) { SetPackedFlag<kFlagAlwaysThrows>(always_throws); }
4783
4784 bool AlwaysThrows() const override final { return GetPackedFlag<kFlagAlwaysThrows>(); }
4785
4786 bool CanBeMoved() const override { return IsIntrinsic() && !DoesAnyWrite(); }
4787
4788 bool InstructionDataEquals(const HInstruction* other) const override {
4789 return intrinsic_ != Intrinsics::kNone && intrinsic_ == other->AsInvoke()->intrinsic_;
4790 }
4791
4792 uint32_t* GetIntrinsicOptimizations() {
4793 return &intrinsic_optimizations_;
4794 }
4795
4796 const uint32_t* GetIntrinsicOptimizations() const {
4797 return &intrinsic_optimizations_;
4798 }
4799
4800 bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; }
4801
4802 ArtMethod* GetResolvedMethod() const { return resolved_method_; }
4803 void SetResolvedMethod(ArtMethod* method, bool enable_intrinsic_opt);
4804
4805 MethodReference GetMethodReference() const { return method_reference_; }
4806
4807 const MethodReference GetResolvedMethodReference() const {
4808 return resolved_method_reference_;
4809 }
4810
4811 DECLARE_ABSTRACT_INSTRUCTION(Invoke);
4812
4813 protected:
4814 static constexpr size_t kFieldInvokeType = kNumberOfGenericPackedBits;
4815 static constexpr size_t kFieldInvokeTypeSize =
4816 MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
4817 static constexpr size_t kFlagCanThrow = kFieldInvokeType + kFieldInvokeTypeSize;
4818 static constexpr size_t kFlagAlwaysThrows = kFlagCanThrow + 1;
4819 static constexpr size_t kNumberOfInvokePackedBits = kFlagAlwaysThrows + 1;
4820 static_assert(kNumberOfInvokePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
4821 using InvokeTypeField = BitField<InvokeType, kFieldInvokeType, kFieldInvokeTypeSize>;
4822
4823 HInvoke(InstructionKind kind,
4824 ArenaAllocator* allocator,
4825 uint32_t number_of_arguments,
4826 uint32_t number_of_other_inputs,
4827 DataType::Type return_type,
4828 uint32_t dex_pc,
4829 MethodReference method_reference,
4830 ArtMethod* resolved_method,
4831 MethodReference resolved_method_reference,
4832 InvokeType invoke_type,
4833 bool enable_intrinsic_opt)
4834 : HVariableInputSizeInstruction(
4835 kind,
4836 return_type,
4837 SideEffects::AllExceptGCDependency(), // Assume write/read on all fields/arrays.
4838 dex_pc,
4839 allocator,
4840 number_of_arguments + number_of_other_inputs,
4841 kArenaAllocInvokeInputs),
4842 number_of_arguments_(number_of_arguments),
4843 method_reference_(method_reference),
4844 resolved_method_reference_(resolved_method_reference),
4845 intrinsic_(Intrinsics::kNone),
4846 intrinsic_optimizations_(0) {
4847 SetPackedField<InvokeTypeField>(invoke_type);
4848 SetPackedFlag<kFlagCanThrow>(true);
4849 SetResolvedMethod(resolved_method, enable_intrinsic_opt);
4850 }
4851
4852 DEFAULT_COPY_CONSTRUCTOR(Invoke);
4853
4854 uint32_t number_of_arguments_;
4855 ArtMethod* resolved_method_;
4856 const MethodReference method_reference_;
4857 // Cached values of the resolved method, to avoid needing the mutator lock.
4858 const MethodReference resolved_method_reference_;
4859 Intrinsics intrinsic_;
4860
4861 // A magic word holding optimizations for intrinsics. See intrinsics.h.
4862 uint32_t intrinsic_optimizations_;
4863 };
4864
4865 class HInvokeUnresolved final : public HInvoke {
4866 public:
4867 HInvokeUnresolved(ArenaAllocator* allocator,
4868 uint32_t number_of_arguments,
4869 DataType::Type return_type,
4870 uint32_t dex_pc,
4871 MethodReference method_reference,
4872 InvokeType invoke_type)
4873 : HInvoke(kInvokeUnresolved,
4874 allocator,
4875 number_of_arguments,
4876 /* number_of_other_inputs= */ 0u,
4877 return_type,
4878 dex_pc,
4879 method_reference,
4880 nullptr,
4881 MethodReference(nullptr, 0u),
4882 invoke_type,
4883 /* enable_intrinsic_opt= */ false) {
4884 }
4885
4886 bool IsClonable() const override { return true; }
4887
4888 DECLARE_INSTRUCTION(InvokeUnresolved);
4889
4890 protected:
4891 DEFAULT_COPY_CONSTRUCTOR(InvokeUnresolved);
4892 };
4893
4894 class HInvokePolymorphic final : public HInvoke {
4895 public:
4896 HInvokePolymorphic(ArenaAllocator* allocator,
4897 uint32_t number_of_arguments,
4898 DataType::Type return_type,
4899 uint32_t dex_pc,
4900 MethodReference method_reference,
4901 // resolved_method is the ArtMethod object corresponding to the polymorphic
4902 // method (e.g. VarHandle.get), resolved using the class linker. It is needed
4903 // to pass intrinsic information to the HInvokePolymorphic node.
4904 ArtMethod* resolved_method,
4905 MethodReference resolved_method_reference,
4906 dex::ProtoIndex proto_idx)
4907 : HInvoke(kInvokePolymorphic,
4908 allocator,
4909 number_of_arguments,
4910 /* number_of_other_inputs= */ 0u,
4911 return_type,
4912 dex_pc,
4913 method_reference,
4914 resolved_method,
4915 resolved_method_reference,
4916 kPolymorphic,
4917 /* enable_intrinsic_opt= */ true),
4918 proto_idx_(proto_idx) {}
4919
4920 bool IsClonable() const override { return true; }
4921
4922 dex::ProtoIndex GetProtoIndex() { return proto_idx_; }
4923
4924 DECLARE_INSTRUCTION(InvokePolymorphic);
4925
4926 protected:
4927 dex::ProtoIndex proto_idx_;
4928 DEFAULT_COPY_CONSTRUCTOR(InvokePolymorphic);
4929 };
4930
4931 class HInvokeCustom final : public HInvoke {
4932 public:
4933 HInvokeCustom(ArenaAllocator* allocator,
4934 uint32_t number_of_arguments,
4935 uint32_t call_site_index,
4936 DataType::Type return_type,
4937 uint32_t dex_pc,
4938 MethodReference method_reference,
4939 bool enable_intrinsic_opt)
4940 : HInvoke(kInvokeCustom,
4941 allocator,
4942 number_of_arguments,
4943 /* number_of_other_inputs= */ 0u,
4944 return_type,
4945 dex_pc,
4946 method_reference,
4947 /* resolved_method= */ nullptr,
4948 MethodReference(nullptr, 0u),
4949 kStatic,
4950 enable_intrinsic_opt),
4951 call_site_index_(call_site_index) {
4952 }
4953
4954 uint32_t GetCallSiteIndex() const { return call_site_index_; }
4955
4956 bool IsClonable() const override { return true; }
4957
4958 DECLARE_INSTRUCTION(InvokeCustom);
4959
4960 protected:
4961 DEFAULT_COPY_CONSTRUCTOR(InvokeCustom);
4962
4963 private:
4964 uint32_t call_site_index_;
4965 };
4966
4967 class HInvokeStaticOrDirect final : public HInvoke {
4968 public:
4969 // Requirements of this method call regarding the class
4970 // initialization (clinit) check of its declaring class.
  enum class ClinitCheckRequirement {  // private marker to prevent generate-operator-out.py from processing this enum.
4972 kNone, // Class already initialized.
4973 kExplicit, // Static call having explicit clinit check as last input.
4974 kImplicit, // Static call implicitly requiring a clinit check.
4975 kLast = kImplicit
4976 };
4977
4978 struct DispatchInfo {
4979 MethodLoadKind method_load_kind;
4980 CodePtrLocation code_ptr_location;
    // The method load data holds
    //   - the thread entrypoint offset for a kStringInit method if this is a string init
    //     invoke. Note that there are multiple string init methods, each having its own offset.
    //   - the method address for kJitDirectAddress.
    // See the illustrative sketch after this struct for how these fields fit together.
4985 uint64_t method_load_data;
4986 };
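  // A minimal sketch of filling in DispatchInfo (hedged example; the variable `art_method`
  // is hypothetical and not part of this file):
  //
  //   HInvokeStaticOrDirect::DispatchInfo info = {
  //       MethodLoadKind::kJitDirectAddress,
  //       CodePtrLocation::kCallArtMethod,
  //       /* method_load_data= */ reinterpret_cast<uint64_t>(art_method),
  //   };
  //
  // For MethodLoadKind::kStringInit, method_load_data would instead hold the thread
  // entrypoint offset of the selected String init method.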
4987
4988 HInvokeStaticOrDirect(ArenaAllocator* allocator,
4989 uint32_t number_of_arguments,
4990 DataType::Type return_type,
4991 uint32_t dex_pc,
4992 MethodReference method_reference,
4993 ArtMethod* resolved_method,
4994 DispatchInfo dispatch_info,
4995 InvokeType invoke_type,
4996 MethodReference resolved_method_reference,
4997 ClinitCheckRequirement clinit_check_requirement,
4998 bool enable_intrinsic_opt)
4999 : HInvoke(kInvokeStaticOrDirect,
5000 allocator,
5001 number_of_arguments,
5002 // There is potentially one extra argument for the HCurrentMethod input,
5003 // and one other if the clinit check is explicit. These can be removed later.
5004 (NeedsCurrentMethodInput(dispatch_info) ? 1u : 0u) +
5005 (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u),
5006 return_type,
5007 dex_pc,
5008 method_reference,
5009 resolved_method,
5010 resolved_method_reference,
5011 invoke_type,
5012 enable_intrinsic_opt),
5013 dispatch_info_(dispatch_info) {
5014 SetPackedField<ClinitCheckRequirementField>(clinit_check_requirement);
5015 }
5016
5017 bool IsClonable() const override { return true; }
5018 bool NeedsBss() const override {
5019 return GetMethodLoadKind() == MethodLoadKind::kBssEntry;
5020 }
5021
5022 void SetDispatchInfo(DispatchInfo dispatch_info) {
5023 bool had_current_method_input = HasCurrentMethodInput();
5024 bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info);
5025
5026 // Using the current method is the default and once we find a better
5027 // method load kind, we should not go back to using the current method.
5028 DCHECK(had_current_method_input || !needs_current_method_input);
5029
5030 if (had_current_method_input && !needs_current_method_input) {
5031 DCHECK_EQ(InputAt(GetCurrentMethodIndex()), GetBlock()->GetGraph()->GetCurrentMethod());
5032 RemoveInputAt(GetCurrentMethodIndex());
5033 }
5034 dispatch_info_ = dispatch_info;
5035 }
5036
5037 DispatchInfo GetDispatchInfo() const {
5038 return dispatch_info_;
5039 }
5040
5041 using HInstruction::GetInputRecords; // Keep the const version visible.
5042 ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() override {
5043 ArrayRef<HUserRecord<HInstruction*>> input_records = HInvoke::GetInputRecords();
5044 if (kIsDebugBuild && IsStaticWithExplicitClinitCheck()) {
5045 DCHECK(!input_records.empty());
5046 DCHECK_GT(input_records.size(), GetNumberOfArguments());
5047 HInstruction* last_input = input_records.back().GetInstruction();
5048 // Note: `last_input` may be null during arguments setup.
5049 if (last_input != nullptr) {
5050 // `last_input` is the last input of a static invoke marked as having
5051 // an explicit clinit check. It must either be:
5052 // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or
5053 // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation.
5054 DCHECK(last_input->IsClinitCheck() || last_input->IsLoadClass()) << last_input->DebugName();
5055 }
5056 }
5057 return input_records;
5058 }
5059
5060 bool CanDoImplicitNullCheckOn([[maybe_unused]] HInstruction* obj) const override {
5061 // We do not access the method via object reference, so we cannot do an implicit null check.
5062 // TODO: for intrinsics we can generate implicit null checks.
5063 return false;
5064 }
5065
5066 bool CanBeNull() const override;
5067
5068 MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
5069 CodePtrLocation GetCodePtrLocation() const {
    // We do CHA analysis after sharpening. When a method has CHA inlining, it
    // cannot call itself, because if the CHA optimization is invalidated we want to
    // make sure the method is never executed again. So, while sharpening can return
    // kCallSelf, we bypass it here if there is a CHA optimization.
5074 if (dispatch_info_.code_ptr_location == CodePtrLocation::kCallSelf &&
5075 GetBlock()->GetGraph()->HasShouldDeoptimizeFlag()) {
5076 return CodePtrLocation::kCallArtMethod;
5077 } else {
5078 return dispatch_info_.code_ptr_location;
5079 }
5080 }
5081 bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
5082 bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; }
5083 bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kJitDirectAddress; }
5084 bool HasPcRelativeMethodLoadKind() const {
5085 return IsPcRelativeMethodLoadKind(GetMethodLoadKind());
5086 }
5087
5088 QuickEntrypointEnum GetStringInitEntryPoint() const {
5089 DCHECK(IsStringInit());
5090 return static_cast<QuickEntrypointEnum>(dispatch_info_.method_load_data);
5091 }
5092
5093 uint64_t GetMethodAddress() const {
5094 DCHECK(HasMethodAddress());
5095 return dispatch_info_.method_load_data;
5096 }
5097
5098 const DexFile& GetDexFileForPcRelativeDexCache() const;
5099
5100 ClinitCheckRequirement GetClinitCheckRequirement() const {
5101 return GetPackedField<ClinitCheckRequirementField>();
5102 }
5103
5104 // Is this instruction a call to a static method?
5105 bool IsStatic() const {
5106 return GetInvokeType() == kStatic;
5107 }
5108
5109 // Does this method load kind need the current method as an input?
5110 static bool NeedsCurrentMethodInput(DispatchInfo dispatch_info) {
5111 return dispatch_info.method_load_kind == MethodLoadKind::kRecursive ||
5112 dispatch_info.method_load_kind == MethodLoadKind::kRuntimeCall ||
5113 dispatch_info.code_ptr_location == CodePtrLocation::kCallCriticalNative;
5114 }
5115
5116 // Get the index of the current method input.
5117 size_t GetCurrentMethodIndex() const {
5118 DCHECK(HasCurrentMethodInput());
5119 return GetCurrentMethodIndexUnchecked();
5120 }
5121 size_t GetCurrentMethodIndexUnchecked() const {
5122 return GetNumberOfArguments();
5123 }
5124
5125 // Check if the method has a current method input.
5126 bool HasCurrentMethodInput() const {
5127 if (NeedsCurrentMethodInput(GetDispatchInfo())) {
5128 DCHECK(InputAt(GetCurrentMethodIndexUnchecked()) == nullptr || // During argument setup.
5129 InputAt(GetCurrentMethodIndexUnchecked())->IsCurrentMethod());
5130 return true;
5131 } else {
5132 DCHECK(InputCount() == GetCurrentMethodIndexUnchecked() ||
5133 InputAt(GetCurrentMethodIndexUnchecked()) == nullptr || // During argument setup.
5134 !InputAt(GetCurrentMethodIndexUnchecked())->IsCurrentMethod());
5135 return false;
5136 }
5137 }
5138
5139 // Get the index of the special input.
5140 size_t GetSpecialInputIndex() const {
5141 DCHECK(HasSpecialInput());
5142 return GetSpecialInputIndexUnchecked();
5143 }
5144 size_t GetSpecialInputIndexUnchecked() const {
5145 return GetNumberOfArguments() + (HasCurrentMethodInput() ? 1u : 0u);
5146 }
5147
5148 // Check if the method has a special input.
5149 bool HasSpecialInput() const {
5150 size_t other_inputs =
5151 GetSpecialInputIndexUnchecked() + (IsStaticWithExplicitClinitCheck() ? 1u : 0u);
5152 size_t input_count = InputCount();
5153 DCHECK_LE(input_count - other_inputs, 1u) << other_inputs << " " << input_count;
5154 return other_inputs != input_count;
5155 }
5156
5157 void AddSpecialInput(HInstruction* input) {
5158 // We allow only one special input.
5159 DCHECK(!HasSpecialInput());
5160 InsertInputAt(GetSpecialInputIndexUnchecked(), input);
5161 }
5162
5163 // Remove the HClinitCheck or the replacement HLoadClass (set as last input by
5164 // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck)
5165 // instruction; only relevant for static calls with explicit clinit check.
5166 void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) {
5167 DCHECK(IsStaticWithExplicitClinitCheck());
5168 size_t last_input_index = inputs_.size() - 1u;
5169 HInstruction* last_input = inputs_.back().GetInstruction();
5170 DCHECK(last_input != nullptr);
5171 DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName();
5172 RemoveAsUserOfInput(last_input_index);
5173 inputs_.pop_back();
5174 SetPackedField<ClinitCheckRequirementField>(new_requirement);
5175 DCHECK(!IsStaticWithExplicitClinitCheck());
5176 }
5177
5178 // Is this a call to a static method whose declaring class has an
5179 // explicit initialization check in the graph?
5180 bool IsStaticWithExplicitClinitCheck() const {
5181 return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kExplicit);
5182 }
5183
5184 // Is this a call to a static method whose declaring class has an
  // implicit initialization check requirement?
5186 bool IsStaticWithImplicitClinitCheck() const {
5187 return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kImplicit);
5188 }
5189
5190 DECLARE_INSTRUCTION(InvokeStaticOrDirect);
5191
5192 protected:
5193 DEFAULT_COPY_CONSTRUCTOR(InvokeStaticOrDirect);
5194
5195 private:
5196 static constexpr size_t kFieldClinitCheckRequirement = kNumberOfInvokePackedBits;
5197 static constexpr size_t kFieldClinitCheckRequirementSize =
5198 MinimumBitsToStore(static_cast<size_t>(ClinitCheckRequirement::kLast));
5199 static constexpr size_t kNumberOfInvokeStaticOrDirectPackedBits =
5200 kFieldClinitCheckRequirement + kFieldClinitCheckRequirementSize;
5201 static_assert(kNumberOfInvokeStaticOrDirectPackedBits <= kMaxNumberOfPackedBits,
5202 "Too many packed fields.");
5203 using ClinitCheckRequirementField = BitField<ClinitCheckRequirement,
5204 kFieldClinitCheckRequirement,
5205 kFieldClinitCheckRequirementSize>;
5206
5207 DispatchInfo dispatch_info_;
5208 };
5209 std::ostream& operator<<(std::ostream& os, MethodLoadKind rhs);
5210 std::ostream& operator<<(std::ostream& os, CodePtrLocation rhs);
5211 std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs);
5212
5213 class HInvokeVirtual final : public HInvoke {
5214 public:
5215 HInvokeVirtual(ArenaAllocator* allocator,
5216 uint32_t number_of_arguments,
5217 DataType::Type return_type,
5218 uint32_t dex_pc,
5219 MethodReference method_reference,
5220 ArtMethod* resolved_method,
5221 MethodReference resolved_method_reference,
5222 uint32_t vtable_index,
5223 bool enable_intrinsic_opt)
5224 : HInvoke(kInvokeVirtual,
5225 allocator,
5226 number_of_arguments,
5227 0u,
5228 return_type,
5229 dex_pc,
5230 method_reference,
5231 resolved_method,
5232 resolved_method_reference,
5233 kVirtual,
5234 enable_intrinsic_opt),
5235 vtable_index_(vtable_index) {
5236 }
5237
5238 bool IsClonable() const override { return true; }
5239
5240 bool CanBeNull() const override {
5241 switch (GetIntrinsic()) {
5242 case Intrinsics::kThreadCurrentThread:
5243 case Intrinsics::kStringBufferAppend:
5244 case Intrinsics::kStringBufferToString:
5245 case Intrinsics::kStringBuilderAppendObject:
5246 case Intrinsics::kStringBuilderAppendString:
5247 case Intrinsics::kStringBuilderAppendCharSequence:
5248 case Intrinsics::kStringBuilderAppendCharArray:
5249 case Intrinsics::kStringBuilderAppendBoolean:
5250 case Intrinsics::kStringBuilderAppendChar:
5251 case Intrinsics::kStringBuilderAppendInt:
5252 case Intrinsics::kStringBuilderAppendLong:
5253 case Intrinsics::kStringBuilderAppendFloat:
5254 case Intrinsics::kStringBuilderAppendDouble:
5255 case Intrinsics::kStringBuilderToString:
5256 return false;
5257 default:
5258 return HInvoke::CanBeNull();
5259 }
5260 }
5261
5262 bool CanDoImplicitNullCheckOn(HInstruction* obj) const override;
5263
5264 uint32_t GetVTableIndex() const { return vtable_index_; }
5265
5266 DECLARE_INSTRUCTION(InvokeVirtual);
5267
5268 protected:
5269 DEFAULT_COPY_CONSTRUCTOR(InvokeVirtual);
5270
5271 private:
5272 // Cached value of the resolved method, to avoid needing the mutator lock.
5273 const uint32_t vtable_index_;
5274 };
5275
5276 class HInvokeInterface final : public HInvoke {
5277 public:
5278 HInvokeInterface(ArenaAllocator* allocator,
5279 uint32_t number_of_arguments,
5280 DataType::Type return_type,
5281 uint32_t dex_pc,
5282 MethodReference method_reference,
5283 ArtMethod* resolved_method,
5284 MethodReference resolved_method_reference,
5285 uint32_t imt_index,
5286 MethodLoadKind load_kind,
5287 bool enable_intrinsic_opt)
5288 : HInvoke(kInvokeInterface,
5289 allocator,
5290 number_of_arguments + (NeedsCurrentMethod(load_kind) ? 1 : 0),
5291 0u,
5292 return_type,
5293 dex_pc,
5294 method_reference,
5295 resolved_method,
5296 resolved_method_reference,
5297 kInterface,
5298 enable_intrinsic_opt),
5299 imt_index_(imt_index),
5300 hidden_argument_load_kind_(load_kind) {
5301 }
5302
5303 static bool NeedsCurrentMethod(MethodLoadKind load_kind) {
5304 return load_kind == MethodLoadKind::kRecursive;
5305 }
5306
5307 bool IsClonable() const override { return true; }
5308 bool NeedsBss() const override {
5309 return GetHiddenArgumentLoadKind() == MethodLoadKind::kBssEntry;
5310 }
5311
5312 bool CanDoImplicitNullCheckOn(HInstruction* obj) const override {
5313 // TODO: Add implicit null checks in intrinsics.
5314 return (obj == InputAt(0)) && !IsIntrinsic();
5315 }
5316
5317 size_t GetSpecialInputIndex() const {
5318 return GetNumberOfArguments();
5319 }
5320
5321 void AddSpecialInput(HInstruction* input) {
5322 InsertInputAt(GetSpecialInputIndex(), input);
5323 }
5324
5325 uint32_t GetImtIndex() const { return imt_index_; }
5326 MethodLoadKind GetHiddenArgumentLoadKind() const { return hidden_argument_load_kind_; }
5327
5328 DECLARE_INSTRUCTION(InvokeInterface);
5329
5330 protected:
5331 DEFAULT_COPY_CONSTRUCTOR(InvokeInterface);
5332
5333 private:
5334 // Cached value of the resolved method, to avoid needing the mutator lock.
5335 const uint32_t imt_index_;
5336
5337 // How the hidden argument (the interface method) is being loaded.
5338 const MethodLoadKind hidden_argument_load_kind_;
5339 };
5340
5341 class HNeg final : public HUnaryOperation {
5342 public:
5343 HNeg(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
5344 : HUnaryOperation(kNeg, result_type, input, dex_pc) {
5345 DCHECK_EQ(result_type, DataType::Kind(input->GetType()));
5346 }
5347
5348 template <typename T> static T Compute(T x) { return -x; }
5349
5350 HConstant* Evaluate(HIntConstant* x) const override {
5351 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
5352 }
5353 HConstant* Evaluate(HLongConstant* x) const override {
5354 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
5355 }
5356 HConstant* Evaluate(HFloatConstant* x) const override {
5357 return GetBlock()->GetGraph()->GetFloatConstant(Compute(x->GetValue()), GetDexPc());
5358 }
5359 HConstant* Evaluate(HDoubleConstant* x) const override {
5360 return GetBlock()->GetGraph()->GetDoubleConstant(Compute(x->GetValue()), GetDexPc());
5361 }
5362
5363 DECLARE_INSTRUCTION(Neg);
5364
5365 protected:
5366 DEFAULT_COPY_CONSTRUCTOR(Neg);
5367 };
5368
5369 class HNewArray final : public HExpression<2> {
5370 public:
5371 HNewArray(HInstruction* cls, HInstruction* length, uint32_t dex_pc, size_t component_size_shift)
5372 : HExpression(kNewArray, DataType::Type::kReference, SideEffects::CanTriggerGC(), dex_pc) {
5373 SetRawInputAt(0, cls);
5374 SetRawInputAt(1, length);
5375 SetPackedField<ComponentSizeShiftField>(component_size_shift);
5376 }
5377
5378 bool IsClonable() const override { return true; }
5379
5380 // Calls runtime so needs an environment.
5381 bool NeedsEnvironment() const override { return true; }
5382
5383 // May throw NegativeArraySizeException, OutOfMemoryError, etc.
5384 bool CanThrow() const override { return true; }
5385
5386 bool CanBeNull() const override { return false; }
5387
5388 HLoadClass* GetLoadClass() const {
5389 DCHECK(InputAt(0)->IsLoadClass());
5390 return InputAt(0)->AsLoadClass();
5391 }
5392
5393 HInstruction* GetLength() const {
5394 return InputAt(1);
5395 }
5396
5397 size_t GetComponentSizeShift() {
5398 return GetPackedField<ComponentSizeShiftField>();
5399 }
5400
5401 DECLARE_INSTRUCTION(NewArray);
5402
5403 protected:
5404 DEFAULT_COPY_CONSTRUCTOR(NewArray);
5405
5406 private:
5407 static constexpr size_t kFieldComponentSizeShift = kNumberOfGenericPackedBits;
5408 static constexpr size_t kFieldComponentSizeShiftSize = MinimumBitsToStore(3u);
5409 static constexpr size_t kNumberOfNewArrayPackedBits =
5410 kFieldComponentSizeShift + kFieldComponentSizeShiftSize;
5411 static_assert(kNumberOfNewArrayPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
5412 using ComponentSizeShiftField =
5413 BitField<size_t, kFieldComponentSizeShift, kFieldComponentSizeShiftSize>;
5414 };
5415
5416 class HAdd final : public HBinaryOperation {
5417 public:
5418 HAdd(DataType::Type result_type,
5419 HInstruction* left,
5420 HInstruction* right,
5421 uint32_t dex_pc = kNoDexPc)
5422 : HBinaryOperation(kAdd, result_type, left, right, SideEffects::None(), dex_pc) {
5423 }
5424
5425 bool IsCommutative() const override { return true; }
5426
5427 template <typename T> static T Compute(T x, T y) { return x + y; }
5428
5429 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5430 return GetBlock()->GetGraph()->GetIntConstant(
5431 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5432 }
5433 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5434 return GetBlock()->GetGraph()->GetLongConstant(
5435 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5436 }
5437 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
5438 return GetBlock()->GetGraph()->GetFloatConstant(
5439 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5440 }
5441 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
5442 return GetBlock()->GetGraph()->GetDoubleConstant(
5443 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5444 }
5445
5446 DECLARE_INSTRUCTION(Add);
5447
5448 protected:
5449 DEFAULT_COPY_CONSTRUCTOR(Add);
5450 };
5451
5452 class HSub final : public HBinaryOperation {
5453 public:
5454 HSub(DataType::Type result_type,
5455 HInstruction* left,
5456 HInstruction* right,
5457 uint32_t dex_pc = kNoDexPc)
5458 : HBinaryOperation(kSub, result_type, left, right, SideEffects::None(), dex_pc) {
5459 }
5460
5461 template <typename T> static T Compute(T x, T y) { return x - y; }
5462
5463 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5464 return GetBlock()->GetGraph()->GetIntConstant(
5465 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5466 }
5467 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5468 return GetBlock()->GetGraph()->GetLongConstant(
5469 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5470 }
5471 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
5472 return GetBlock()->GetGraph()->GetFloatConstant(
5473 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5474 }
5475 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
5476 return GetBlock()->GetGraph()->GetDoubleConstant(
5477 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5478 }
5479
5480 DECLARE_INSTRUCTION(Sub);
5481
5482 protected:
5483 DEFAULT_COPY_CONSTRUCTOR(Sub);
5484 };
5485
5486 class HMul final : public HBinaryOperation {
5487 public:
5488 HMul(DataType::Type result_type,
5489 HInstruction* left,
5490 HInstruction* right,
5491 uint32_t dex_pc = kNoDexPc)
5492 : HBinaryOperation(kMul, result_type, left, right, SideEffects::None(), dex_pc) {
5493 }
5494
5495 bool IsCommutative() const override { return true; }
5496
5497 template <typename T> static T Compute(T x, T y) { return x * y; }
5498
5499 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5500 return GetBlock()->GetGraph()->GetIntConstant(
5501 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5502 }
5503 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5504 return GetBlock()->GetGraph()->GetLongConstant(
5505 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5506 }
5507 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
5508 return GetBlock()->GetGraph()->GetFloatConstant(
5509 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5510 }
5511 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
5512 return GetBlock()->GetGraph()->GetDoubleConstant(
5513 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5514 }
5515
5516 DECLARE_INSTRUCTION(Mul);
5517
5518 protected:
5519 DEFAULT_COPY_CONSTRUCTOR(Mul);
5520 };
5521
5522 class HDiv final : public HBinaryOperation {
5523 public:
5524 HDiv(DataType::Type result_type,
5525 HInstruction* left,
5526 HInstruction* right,
5527 uint32_t dex_pc)
5528 : HBinaryOperation(kDiv, result_type, left, right, SideEffects::None(), dex_pc) {
5529 }
5530
5531 template <typename T>
5532 T ComputeIntegral(T x, T y) const {
5533 DCHECK(!DataType::IsFloatingPointType(GetType())) << GetType();
5534 // Our graph structure ensures we never have 0 for `y` during
5535 // constant folding.
5536 DCHECK_NE(y, 0);
5537 // Special case -1 to avoid getting a SIGFPE on x86(_64).
5538 return (y == -1) ? -x : x / y;
5539 }
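  // Added note (illustrative): the -1 special case avoids the SIGFPE that the x86(_64)
  // `idiv` instruction raises for INT32_MIN / -1. Java defines that quotient to be
  // INT32_MIN, and negating INT32_MIN wraps back to INT32_MIN in two's-complement
  // arithmetic, so returning -x produces the required result.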
5540
5541 template <typename T>
5542 T ComputeFP(T x, T y) const {
5543 DCHECK(DataType::IsFloatingPointType(GetType())) << GetType();
5544 return x / y;
5545 }
5546
5547 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5548 return GetBlock()->GetGraph()->GetIntConstant(
5549 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5550 }
5551 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5552 return GetBlock()->GetGraph()->GetLongConstant(
5553 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5554 }
5555 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
5556 return GetBlock()->GetGraph()->GetFloatConstant(
5557 ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
5558 }
5559 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
5560 return GetBlock()->GetGraph()->GetDoubleConstant(
5561 ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
5562 }
5563
5564 DECLARE_INSTRUCTION(Div);
5565
5566 protected:
5567 DEFAULT_COPY_CONSTRUCTOR(Div);
5568 };
5569
5570 class HRem final : public HBinaryOperation {
5571 public:
5572 HRem(DataType::Type result_type,
5573 HInstruction* left,
5574 HInstruction* right,
5575 uint32_t dex_pc)
5576 : HBinaryOperation(kRem, result_type, left, right, SideEffects::None(), dex_pc) {
5577 }
5578
5579 template <typename T>
5580 T ComputeIntegral(T x, T y) const {
5581 DCHECK(!DataType::IsFloatingPointType(GetType())) << GetType();
5582 // Our graph structure ensures we never have 0 for `y` during
5583 // constant folding.
5584 DCHECK_NE(y, 0);
5585 // Special case -1 to avoid getting a SIGFPE on x86(_64).
5586 return (y == -1) ? 0 : x % y;
5587 }
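  // Added note (illustrative): Java defines x % -1 to be 0 for every x, including
  // INT32_MIN, so returning 0 directly matches the language semantics and, as with
  // HDiv's -1 special case, avoids the SIGFPE that x86(_64) `idiv` would raise.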
5588
5589 template <typename T>
5590 T ComputeFP(T x, T y) const {
5591 DCHECK(DataType::IsFloatingPointType(GetType())) << GetType();
5592 return std::fmod(x, y);
5593 }
5594
5595 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5596 return GetBlock()->GetGraph()->GetIntConstant(
5597 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5598 }
5599 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5600 return GetBlock()->GetGraph()->GetLongConstant(
5601 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5602 }
5603 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const override {
5604 return GetBlock()->GetGraph()->GetFloatConstant(
5605 ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
5606 }
5607 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const override {
5608 return GetBlock()->GetGraph()->GetDoubleConstant(
5609 ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
5610 }
5611
5612 DECLARE_INSTRUCTION(Rem);
5613
5614 protected:
5615 DEFAULT_COPY_CONSTRUCTOR(Rem);
5616 };
5617
5618 class HMin final : public HBinaryOperation {
5619 public:
5620 HMin(DataType::Type result_type,
5621 HInstruction* left,
5622 HInstruction* right,
5623 uint32_t dex_pc)
5624 : HBinaryOperation(kMin, result_type, left, right, SideEffects::None(), dex_pc) {}
5625
5626 bool IsCommutative() const override { return true; }
5627
5628 // Evaluation for integral values.
5629 template <typename T> static T ComputeIntegral(T x, T y) {
5630 return (x <= y) ? x : y;
5631 }
5632
5633 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5634 return GetBlock()->GetGraph()->GetIntConstant(
5635 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5636 }
5637 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5638 return GetBlock()->GetGraph()->GetLongConstant(
5639 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5640 }
5641 // TODO: Evaluation for floating-point values.
5642 HConstant* Evaluate([[maybe_unused]] HFloatConstant* x,
5643 [[maybe_unused]] HFloatConstant* y) const override {
5644 return nullptr;
5645 }
5646 HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x,
5647 [[maybe_unused]] HDoubleConstant* y) const override {
5648 return nullptr;
5649 }
5650
5651 DECLARE_INSTRUCTION(Min);
5652
5653 protected:
5654 DEFAULT_COPY_CONSTRUCTOR(Min);
5655 };
5656
5657 class HMax final : public HBinaryOperation {
5658 public:
5659 HMax(DataType::Type result_type,
5660 HInstruction* left,
5661 HInstruction* right,
5662 uint32_t dex_pc)
5663 : HBinaryOperation(kMax, result_type, left, right, SideEffects::None(), dex_pc) {}
5664
5665 bool IsCommutative() const override { return true; }
5666
5667 // Evaluation for integral values.
5668 template <typename T> static T ComputeIntegral(T x, T y) {
5669 return (x >= y) ? x : y;
5670 }
5671
5672 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5673 return GetBlock()->GetGraph()->GetIntConstant(
5674 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5675 }
5676 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5677 return GetBlock()->GetGraph()->GetLongConstant(
5678 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
5679 }
5680 // TODO: Evaluation for floating-point values.
5681 HConstant* Evaluate([[maybe_unused]] HFloatConstant* x,
5682 [[maybe_unused]] HFloatConstant* y) const override {
5683 return nullptr;
5684 }
5685 HConstant* Evaluate([[maybe_unused]] HDoubleConstant* x,
5686 [[maybe_unused]] HDoubleConstant* y) const override {
5687 return nullptr;
5688 }
5689
5690 DECLARE_INSTRUCTION(Max);
5691
5692 protected:
5693 DEFAULT_COPY_CONSTRUCTOR(Max);
5694 };
5695
5696 class HAbs final : public HUnaryOperation {
5697 public:
5698 HAbs(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
5699 : HUnaryOperation(kAbs, result_type, input, dex_pc) {}
5700
5701 // Evaluation for integral values.
5702 template <typename T> static T ComputeIntegral(T x) {
5703 return x < 0 ? -x : x;
5704 }
5705
5706 // Evaluation for floating-point values.
5707 // Note, as a "quality of implementation", rather than pure "spec compliance",
5708 // we require that Math.abs() clears the sign bit (but changes nothing else)
5709 // for all floating-point numbers, including NaN (signaling NaN may become quiet though).
5710 // http://b/30758343
5711 template <typename T, typename S> static T ComputeFP(T x) {
5712 S bits = bit_cast<S, T>(x);
5713 return bit_cast<T, S>(bits & std::numeric_limits<S>::max());
5714 }
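  // Added note (illustrative): clearing the sign bit means, for example, that
  // ComputeFP<float, int32_t>(-0.0f) returns +0.0f, and a NaN input stays a NaN with an
  // unchanged payload apart from the sign bit.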
5715
5716 HConstant* Evaluate(HIntConstant* x) const override {
5717 return GetBlock()->GetGraph()->GetIntConstant(ComputeIntegral(x->GetValue()), GetDexPc());
5718 }
5719 HConstant* Evaluate(HLongConstant* x) const override {
5720 return GetBlock()->GetGraph()->GetLongConstant(ComputeIntegral(x->GetValue()), GetDexPc());
5721 }
5722 HConstant* Evaluate(HFloatConstant* x) const override {
5723 return GetBlock()->GetGraph()->GetFloatConstant(
5724 ComputeFP<float, int32_t>(x->GetValue()), GetDexPc());
5725 }
5726 HConstant* Evaluate(HDoubleConstant* x) const override {
5727 return GetBlock()->GetGraph()->GetDoubleConstant(
5728 ComputeFP<double, int64_t>(x->GetValue()), GetDexPc());
5729 }
5730
5731 DECLARE_INSTRUCTION(Abs);
5732
5733 protected:
5734 DEFAULT_COPY_CONSTRUCTOR(Abs);
5735 };
5736
5737 class HDivZeroCheck final : public HExpression<1> {
5738 public:
  // `HDivZeroCheck` can trigger GC, as it may call the `ArithmeticException` constructor.
  // However, it can only do so on a fatal slow path, so execution never returns to the
  // instruction following the current one; thus `SideEffects::None()` is used.
5742 HDivZeroCheck(HInstruction* value, uint32_t dex_pc)
5743 : HExpression(kDivZeroCheck, value->GetType(), SideEffects::None(), dex_pc) {
5744 SetRawInputAt(0, value);
5745 }
5746
5747 bool IsClonable() const override { return true; }
5748 bool CanBeMoved() const override { return true; }
5749
5750 bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
5751 return true;
5752 }
5753
5754 bool NeedsEnvironment() const override { return true; }
5755 bool CanThrow() const override { return true; }
5756
5757 DECLARE_INSTRUCTION(DivZeroCheck);
5758
5759 protected:
5760 DEFAULT_COPY_CONSTRUCTOR(DivZeroCheck);
5761 };
5762
5763 class HShl final : public HBinaryOperation {
5764 public:
5765 HShl(DataType::Type result_type,
5766 HInstruction* value,
5767 HInstruction* distance,
5768 uint32_t dex_pc = kNoDexPc)
5769 : HBinaryOperation(kShl, result_type, value, distance, SideEffects::None(), dex_pc) {
5770 DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
5771 DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
5772 }
5773
5774 template <typename T>
5775 static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
5776 return value << (distance & max_shift_distance);
5777 }
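  // Added example (illustrative): the distance is masked to the operand width, so for
  // int32 values Compute(1, 33, kMaxIntShiftDistance) == 2, i.e. shifting by 33 behaves
  // like shifting by 1, matching the Dex shift semantics.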
5778
5779 HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const override {
5780 return GetBlock()->GetGraph()->GetIntConstant(
5781 Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
5782 }
5783 HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const override {
5784 return GetBlock()->GetGraph()->GetLongConstant(
5785 Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
5786 }
5787
5788 DECLARE_INSTRUCTION(Shl);
5789
5790 protected:
5791 DEFAULT_COPY_CONSTRUCTOR(Shl);
5792 };
5793
5794 class HShr final : public HBinaryOperation {
5795 public:
5796 HShr(DataType::Type result_type,
5797 HInstruction* value,
5798 HInstruction* distance,
5799 uint32_t dex_pc = kNoDexPc)
5800 : HBinaryOperation(kShr, result_type, value, distance, SideEffects::None(), dex_pc) {
5801 DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
5802 DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
5803 }
5804
5805 template <typename T>
5806 static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
5807 return value >> (distance & max_shift_distance);
5808 }
5809
5810 HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const override {
5811 return GetBlock()->GetGraph()->GetIntConstant(
5812 Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
5813 }
5814 HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const override {
5815 return GetBlock()->GetGraph()->GetLongConstant(
5816 Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
5817 }
5818
5819 DECLARE_INSTRUCTION(Shr);
5820
5821 protected:
5822 DEFAULT_COPY_CONSTRUCTOR(Shr);
5823 };
5824
5825 class HUShr final : public HBinaryOperation {
5826 public:
5827 HUShr(DataType::Type result_type,
5828 HInstruction* value,
5829 HInstruction* distance,
5830 uint32_t dex_pc = kNoDexPc)
5831 : HBinaryOperation(kUShr, result_type, value, distance, SideEffects::None(), dex_pc) {
5832 DCHECK_EQ(result_type, DataType::Kind(value->GetType()));
5833 DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(distance->GetType()));
5834 }
5835
5836 template <typename T>
5837 static T Compute(T value, int32_t distance, int32_t max_shift_distance) {
5838 using V = std::make_unsigned_t<T>;
5839 V ux = static_cast<V>(value);
5840 return static_cast<T>(ux >> (distance & max_shift_distance));
5841 }
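  // Illustrative note (editor's sketch, not in the original source): the cast to the unsigned
  // type makes this a logical shift that fills with zero bits, e.g. for int32,
  // Compute(-1, 28, kMaxIntShiftDistance) == 0xf.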
5842
5843 HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const override {
5844 return GetBlock()->GetGraph()->GetIntConstant(
5845 Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
5846 }
5847 HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const override {
5848 return GetBlock()->GetGraph()->GetLongConstant(
5849 Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
5850 }
5851
5852 DECLARE_INSTRUCTION(UShr);
5853
5854 protected:
5855 DEFAULT_COPY_CONSTRUCTOR(UShr);
5856 };
5857
5858 class HAnd final : public HBinaryOperation {
5859 public:
5860 HAnd(DataType::Type result_type,
5861 HInstruction* left,
5862 HInstruction* right,
5863 uint32_t dex_pc = kNoDexPc)
5864 : HBinaryOperation(kAnd, result_type, left, right, SideEffects::None(), dex_pc) {
5865 }
5866
5867 bool IsCommutative() const override { return true; }
5868
5869 template <typename T> static T Compute(T x, T y) { return x & y; }
5870
5871 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5872 return GetBlock()->GetGraph()->GetIntConstant(
5873 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5874 }
5875 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5876 return GetBlock()->GetGraph()->GetLongConstant(
5877 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5878 }
5879
5880 DECLARE_INSTRUCTION(And);
5881
5882 protected:
5883 DEFAULT_COPY_CONSTRUCTOR(And);
5884 };
5885
5886 class HOr final : public HBinaryOperation {
5887 public:
5888 HOr(DataType::Type result_type,
5889 HInstruction* left,
5890 HInstruction* right,
5891 uint32_t dex_pc = kNoDexPc)
5892 : HBinaryOperation(kOr, result_type, left, right, SideEffects::None(), dex_pc) {
5893 }
5894
5895 bool IsCommutative() const override { return true; }
5896
5897 template <typename T> static T Compute(T x, T y) { return x | y; }
5898
5899 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5900 return GetBlock()->GetGraph()->GetIntConstant(
5901 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5902 }
5903 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5904 return GetBlock()->GetGraph()->GetLongConstant(
5905 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5906 }
5907
5908 DECLARE_INSTRUCTION(Or);
5909
5910 protected:
5911 DEFAULT_COPY_CONSTRUCTOR(Or);
5912 };
5913
5914 class HXor final : public HBinaryOperation {
5915 public:
5916 HXor(DataType::Type result_type,
5917 HInstruction* left,
5918 HInstruction* right,
5919 uint32_t dex_pc = kNoDexPc)
5920 : HBinaryOperation(kXor, result_type, left, right, SideEffects::None(), dex_pc) {
5921 }
5922
5923 bool IsCommutative() const override { return true; }
5924
5925 template <typename T> static T Compute(T x, T y) { return x ^ y; }
5926
5927 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
5928 return GetBlock()->GetGraph()->GetIntConstant(
5929 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5930 }
5931 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
5932 return GetBlock()->GetGraph()->GetLongConstant(
5933 Compute(x->GetValue(), y->GetValue()), GetDexPc());
5934 }
5935
5936 DECLARE_INSTRUCTION(Xor);
5937
5938 protected:
5939 DEFAULT_COPY_CONSTRUCTOR(Xor);
5940 };
5941
5942 class HRor final : public HBinaryOperation {
5943 public:
5944 HRor(DataType::Type result_type, HInstruction* value, HInstruction* distance)
5945 : HBinaryOperation(kRor, result_type, value, distance) {
5946 }
5947
5948 template <typename T>
5949 static T Compute(T value, int32_t distance, int32_t max_shift_value) {
5950 using V = std::make_unsigned_t<T>;
5951 V ux = static_cast<V>(value);
5952 if ((distance & max_shift_value) == 0) {
5953 return static_cast<T>(ux);
5954 } else {
5955 const V reg_bits = sizeof(T) * 8;
5956 return static_cast<T>(ux >> (distance & max_shift_value)) |
5957 (value << (reg_bits - (distance & max_shift_value)));
5958 }
5959 }
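  // Illustrative note (editor's sketch, not in the original source): this rotates right by the
  // masked distance, e.g. for int32, Compute(0x12345678, 8, kMaxIntShiftDistance) ==
  // 0x78123456; a masked distance of zero is special-cased to avoid an out-of-range shift.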
5960
5961 HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const override {
5962 return GetBlock()->GetGraph()->GetIntConstant(
5963 Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
5964 }
5965 HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const override {
5966 return GetBlock()->GetGraph()->GetLongConstant(
5967 Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
5968 }
5969
5970 DECLARE_INSTRUCTION(Ror);
5971
5972 protected:
5973 DEFAULT_COPY_CONSTRUCTOR(Ror);
5974 };
5975
5976 // The value of a parameter in this method. Its location depends on
5977 // the calling convention.
5978 class HParameterValue final : public HExpression<0> {
5979 public:
5980 HParameterValue(const DexFile& dex_file,
5981 dex::TypeIndex type_index,
5982 uint8_t index,
5983 DataType::Type parameter_type,
5984 bool is_this = false)
5985 : HExpression(kParameterValue, parameter_type, SideEffects::None(), kNoDexPc),
5986 dex_file_(dex_file),
5987 type_index_(type_index),
5988 index_(index) {
5989 SetPackedFlag<kFlagIsThis>(is_this);
5990 SetPackedFlag<kFlagCanBeNull>(!is_this);
5991 }
5992
5993 const DexFile& GetDexFile() const { return dex_file_; }
5994 dex::TypeIndex GetTypeIndex() const { return type_index_; }
5995 uint8_t GetIndex() const { return index_; }
5996 bool IsThis() const { return GetPackedFlag<kFlagIsThis>(); }
5997
5998 bool CanBeNull() const override { return GetPackedFlag<kFlagCanBeNull>(); }
5999 void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }
6000
6001 DECLARE_INSTRUCTION(ParameterValue);
6002
6003 protected:
6004 DEFAULT_COPY_CONSTRUCTOR(ParameterValue);
6005
6006 private:
6007 // Whether or not the parameter value corresponds to the 'this' argument.
6008 static constexpr size_t kFlagIsThis = kNumberOfGenericPackedBits;
6009 static constexpr size_t kFlagCanBeNull = kFlagIsThis + 1;
6010 static constexpr size_t kNumberOfParameterValuePackedBits = kFlagCanBeNull + 1;
6011 static_assert(kNumberOfParameterValuePackedBits <= kMaxNumberOfPackedBits,
6012 "Too many packed fields.");
6013
6014 const DexFile& dex_file_;
6015 const dex::TypeIndex type_index_;
6016 // The index of this parameter in the parameters list. Must be less
6017 // than HGraph::number_of_in_vregs_.
6018 const uint8_t index_;
6019 };
6020
6021 class HNot final : public HUnaryOperation {
6022 public:
6023 HNot(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
6024 : HUnaryOperation(kNot, result_type, input, dex_pc) {
6025 }
6026
6027 bool CanBeMoved() const override { return true; }
6028 bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
6029 return true;
6030 }
6031
6032 template <typename T> static T Compute(T x) { return ~x; }
6033
6034 HConstant* Evaluate(HIntConstant* x) const override {
6035 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
6036 }
6037 HConstant* Evaluate(HLongConstant* x) const override {
6038 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
6039 }
6040
6041 DECLARE_INSTRUCTION(Not);
6042
6043 protected:
6044 DEFAULT_COPY_CONSTRUCTOR(Not);
6045 };
6046
6047 class HBooleanNot final : public HUnaryOperation {
6048 public:
6049 explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc)
6050 : HUnaryOperation(kBooleanNot, DataType::Type::kBool, input, dex_pc) {
6051 }
6052
6053 bool CanBeMoved() const override { return true; }
6054 bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
6055 return true;
6056 }
6057
6058 template <typename T> static bool Compute(T x) {
6059 DCHECK(IsUint<1>(x)) << x;
6060 return !x;
6061 }
6062
6063 HConstant* Evaluate(HIntConstant* x) const override {
6064 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
6065 }
6066
6067 DECLARE_INSTRUCTION(BooleanNot);
6068
6069 protected:
6070 DEFAULT_COPY_CONSTRUCTOR(BooleanNot);
6071 };
6072
6073 class HTypeConversion final : public HExpression<1> {
6074 public:
6075 // Instantiate a type conversion of `input` to `result_type`.
6076 HTypeConversion(DataType::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
6077 : HExpression(kTypeConversion, result_type, SideEffects::None(), dex_pc) {
6078 SetRawInputAt(0, input);
6079 // Invariant: We should never generate a conversion to a Boolean value.
6080 DCHECK_NE(DataType::Type::kBool, result_type);
6081 }
6082
6083 HInstruction* GetInput() const { return InputAt(0); }
6084 DataType::Type GetInputType() const { return GetInput()->GetType(); }
6085 DataType::Type GetResultType() const { return GetType(); }
6086
6087 bool IsClonable() const override { return true; }
6088 bool CanBeMoved() const override { return true; }
6089 bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
6090 return true;
6091 }
6092 // Return whether the conversion is implicit. This includes conversion to the same type.
6093 bool IsImplicitConversion() const {
6094 return DataType::IsTypeConversionImplicit(GetInputType(), GetResultType());
6095 }
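  // Illustrative note (editor's assumption about DataType::IsTypeConversionImplicit): a widening
  // integral conversion such as kInt16 -> kInt32 typically needs no code because the value is
  // already held sign-extended in a register, so it is reported as implicit here.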
6096
6097 // Try to statically evaluate the conversion and return a HConstant
6098 // containing the result. If the input cannot be converted, return nullptr.
6099 HConstant* TryStaticEvaluation() const;
6100
6101 // Same but for `input` instead of GetInput().
6102 HConstant* TryStaticEvaluation(HInstruction* input) const;
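  // Illustrative reading (editor's sketch; see the definition in the .cc file for the exact
  // rules): a foldable case would be converting an HIntConstant(300) to kInt8, which truncates
  // to a new HIntConstant(44); a non-constant input is expected to yield nullptr.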
6103
6104 DECLARE_INSTRUCTION(TypeConversion);
6105
6106 protected:
6107 DEFAULT_COPY_CONSTRUCTOR(TypeConversion);
6108 };
6109
6110 static constexpr uint32_t kNoRegNumber = -1;
6111
6112 class HNullCheck final : public HExpression<1> {
6113 public:
6114 // `HNullCheck` can trigger GC, as it may call the `NullPointerException`
6115 // constructor. However, it can only do so on a fatal slow path, so execution never returns to the
6116 // instruction following the current one; thus 'SideEffects::None()' is used.
6117 HNullCheck(HInstruction* value, uint32_t dex_pc)
6118 : HExpression(kNullCheck, value->GetType(), SideEffects::None(), dex_pc) {
6119 SetRawInputAt(0, value);
6120 }
6121
6122 bool IsClonable() const override { return true; }
6123 bool CanBeMoved() const override { return true; }
6124 bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
6125 return true;
6126 }
6127
6128 bool NeedsEnvironment() const override { return true; }
6129
6130 bool CanThrow() const override { return true; }
6131
6132 bool CanBeNull() const override { return false; }
6133
6134 DECLARE_INSTRUCTION(NullCheck);
6135
6136 protected:
6137 DEFAULT_COPY_CONSTRUCTOR(NullCheck);
6138 };
6139
6140 // Embeds an ArtField and all the information required by the compiler. We cache
6141 // that information to avoid requiring the mutator lock every time we need it.
6142 class FieldInfo : public ValueObject {
6143 public:
6144 FieldInfo(ArtField* field,
6145 MemberOffset field_offset,
6146 DataType::Type field_type,
6147 bool is_volatile,
6148 uint32_t index,
6149 uint16_t declaring_class_def_index,
6150 const DexFile& dex_file)
6151 : field_(field),
6152 field_offset_(field_offset),
6153 field_type_(field_type),
6154 is_volatile_(is_volatile),
6155 index_(index),
6156 declaring_class_def_index_(declaring_class_def_index),
6157 dex_file_(dex_file) {}
6158
6159 ArtField* GetField() const { return field_; }
6160 MemberOffset GetFieldOffset() const { return field_offset_; }
6161 DataType::Type GetFieldType() const { return field_type_; }
6162 uint32_t GetFieldIndex() const { return index_; }
6163 uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_; }
6164 const DexFile& GetDexFile() const { return dex_file_; }
6165 bool IsVolatile() const { return is_volatile_; }
6166
6167 bool Equals(const FieldInfo& other) const {
6168 return field_ == other.field_ &&
6169 field_offset_ == other.field_offset_ &&
6170 field_type_ == other.field_type_ &&
6171 is_volatile_ == other.is_volatile_ &&
6172 index_ == other.index_ &&
6173 declaring_class_def_index_ == other.declaring_class_def_index_ &&
6174 &dex_file_ == &other.dex_file_;
6175 }
6176
6177 std::ostream& Dump(std::ostream& os) const {
6178 os << field_ << ", off: " << field_offset_ << ", type: " << field_type_
6179 << ", volatile: " << std::boolalpha << is_volatile_ << ", index_: " << std::dec << index_
6180 << ", declaring_class: " << declaring_class_def_index_ << ", dex: " << dex_file_;
6181 return os;
6182 }
6183
6184 private:
6185 ArtField* const field_;
6186 const MemberOffset field_offset_;
6187 const DataType::Type field_type_;
6188 const bool is_volatile_;
6189 const uint32_t index_;
6190 const uint16_t declaring_class_def_index_;
6191 const DexFile& dex_file_;
6192 };
6193
6194 inline bool operator==(const FieldInfo& a, const FieldInfo& b) {
6195 return a.Equals(b);
6196 }
6197
6198 inline std::ostream& operator<<(std::ostream& os, const FieldInfo& a) {
6199 return a.Dump(os);
6200 }
6201
6202 class HInstanceFieldGet final : public HExpression<1> {
6203 public:
6204 HInstanceFieldGet(HInstruction* value,
6205 ArtField* field,
6206 DataType::Type field_type,
6207 MemberOffset field_offset,
6208 bool is_volatile,
6209 uint32_t field_idx,
6210 uint16_t declaring_class_def_index,
6211 const DexFile& dex_file,
6212 uint32_t dex_pc)
6213 : HExpression(kInstanceFieldGet,
6214 field_type,
6215 SideEffects::FieldReadOfType(field_type, is_volatile),
6216 dex_pc),
6217 field_info_(field,
6218 field_offset,
6219 field_type,
6220 is_volatile,
6221 field_idx,
6222 declaring_class_def_index,
6223 dex_file) {
6224 SetRawInputAt(0, value);
6225 }
6226
6227 bool IsClonable() const override { return true; }
6228 bool CanBeMoved() const override { return !IsVolatile(); }
6229
6230 bool InstructionDataEquals(const HInstruction* other) const override {
6231 const HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
6232 return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
6233 }
6234
6235 bool CanDoImplicitNullCheckOn(HInstruction* obj) const override {
6236 return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
6237 }
6238
6239 size_t ComputeHashCode() const override {
6240 return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
6241 }
6242
6243 bool IsFieldAccess() const override { return true; }
6244 const FieldInfo& GetFieldInfo() const override { return field_info_; }
6245 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
6246 DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
6247 bool IsVolatile() const { return field_info_.IsVolatile(); }
6248
6249 void SetType(DataType::Type new_type) {
6250 DCHECK(DataType::IsIntegralType(GetType()));
6251 DCHECK(DataType::IsIntegralType(new_type));
6252 DCHECK_EQ(DataType::Size(GetType()), DataType::Size(new_type));
6253 SetPackedField<TypeField>(new_type);
6254 }
6255
6256 DECLARE_INSTRUCTION(InstanceFieldGet);
6257
6258 protected:
6259 DEFAULT_COPY_CONSTRUCTOR(InstanceFieldGet);
6260
6261 private:
6262 const FieldInfo field_info_;
6263 };
6264
6265 enum class WriteBarrierKind {
6266 // Emit the write barrier. This write barrier is not being relied on, so e.g. codegen can decide
6267 // to skip it if the value stored is null. This is the default behavior.
6268 kEmitNotBeingReliedOn,
6269 // Emit the write barrier. This write barrier is being relied on and must be emitted.
6270 kEmitBeingReliedOn,
6271 // Skip emitting the write barrier. This could be set because:
6272 // A) The write barrier is not needed (i.e. it is not a reference, or the value is the null
6273 // constant)
6274 // B) This write barrier was coalesced into another one so there's no need to emit it.
6275 kDontEmit,
6276 kLast = kDontEmit
6277 };
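// Illustrative scenario (editor's sketch, not from the original source): when two stores to the
// same object are close together, a write barrier elimination pass may keep only the first
// barrier, marking it kEmitBeingReliedOn, and mark the second store's barrier kDontEmit.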
6278 std::ostream& operator<<(std::ostream& os, WriteBarrierKind rhs);
6279
6280 class HInstanceFieldSet final : public HExpression<2> {
6281 public:
6282 HInstanceFieldSet(HInstruction* object,
6283 HInstruction* value,
6284 ArtField* field,
6285 DataType::Type field_type,
6286 MemberOffset field_offset,
6287 bool is_volatile,
6288 uint32_t field_idx,
6289 uint16_t declaring_class_def_index,
6290 const DexFile& dex_file,
6291 uint32_t dex_pc)
6292 : HExpression(kInstanceFieldSet,
6293 SideEffects::FieldWriteOfType(field_type, is_volatile),
6294 dex_pc),
6295 field_info_(field,
6296 field_offset,
6297 field_type,
6298 is_volatile,
6299 field_idx,
6300 declaring_class_def_index,
6301 dex_file) {
6302 SetPackedFlag<kFlagValueCanBeNull>(true);
6303 SetPackedField<WriteBarrierKindField>(WriteBarrierKind::kEmitNotBeingReliedOn);
6304 SetRawInputAt(0, object);
6305 SetRawInputAt(1, value);
6306 }
6307
6308 bool IsClonable() const override { return true; }
6309
6310 bool CanDoImplicitNullCheckOn(HInstruction* obj) const override {
6311 return (obj == InputAt(0)) && art::CanDoImplicitNullCheckOn(GetFieldOffset().Uint32Value());
6312 }
6313
6314 bool IsFieldAccess() const override { return true; }
6315 const FieldInfo& GetFieldInfo() const override { return field_info_; }
6316 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
6317 DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
6318 bool IsVolatile() const { return field_info_.IsVolatile(); }
6319 HInstruction* GetValue() const { return InputAt(1); }
6320 bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
6321 void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }
6322 WriteBarrierKind GetWriteBarrierKind() { return GetPackedField<WriteBarrierKindField>(); }
6323 void SetWriteBarrierKind(WriteBarrierKind kind) {
6324 DCHECK(kind != WriteBarrierKind::kEmitNotBeingReliedOn)
6325 << "We shouldn't go back to the original value.";
6326 DCHECK_IMPLIES(kind == WriteBarrierKind::kDontEmit,
6327 GetWriteBarrierKind() != WriteBarrierKind::kEmitBeingReliedOn)
6328 << "If a write barrier was relied on by other write barriers, we cannot skip emitting it.";
6329 SetPackedField<WriteBarrierKindField>(kind);
6330 }
6331
6332 DECLARE_INSTRUCTION(InstanceFieldSet);
6333
6334 protected:
6335 DEFAULT_COPY_CONSTRUCTOR(InstanceFieldSet);
6336
6337 private:
6338 static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
6339 static constexpr size_t kWriteBarrierKind = kFlagValueCanBeNull + 1;
6340 static constexpr size_t kWriteBarrierKindSize =
6341 MinimumBitsToStore(static_cast<size_t>(WriteBarrierKind::kLast));
6342 static constexpr size_t kNumberOfInstanceFieldSetPackedBits =
6343 kWriteBarrierKind + kWriteBarrierKindSize;
6344 static_assert(kNumberOfInstanceFieldSetPackedBits <= kMaxNumberOfPackedBits,
6345 "Too many packed fields.");
6346
6347 const FieldInfo field_info_;
6348 using WriteBarrierKindField =
6349 BitField<WriteBarrierKind, kWriteBarrierKind, kWriteBarrierKindSize>;
6350 };
6351
6352 class HArrayGet final : public HExpression<2> {
6353 public:
6354 HArrayGet(HInstruction* array,
6355 HInstruction* index,
6356 DataType::Type type,
6357 uint32_t dex_pc)
6358 : HArrayGet(array,
6359 index,
6360 type,
6361 SideEffects::ArrayReadOfType(type),
6362 dex_pc,
6363 /* is_string_char_at= */ false) {
6364 }
6365
6366 HArrayGet(HInstruction* array,
6367 HInstruction* index,
6368 DataType::Type type,
6369 SideEffects side_effects,
6370 uint32_t dex_pc,
6371 bool is_string_char_at)
6372 : HExpression(kArrayGet, type, side_effects, dex_pc) {
6373 SetPackedFlag<kFlagIsStringCharAt>(is_string_char_at);
6374 SetRawInputAt(0, array);
6375 SetRawInputAt(1, index);
6376 }
6377
6378 bool IsClonable() const override { return true; }
6379 bool CanBeMoved() const override { return true; }
6380 bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
6381 return true;
6382 }
6383 bool CanDoImplicitNullCheckOn([[maybe_unused]] HInstruction* obj) const override {
6384 // TODO: We can be smarter here.
6385 // Currently, unless the array is the result of NewArray, the array access is always
6386 // preceded by some form of null check necessary for the bounds check, usually an
6387 // implicit null check on the ArrayLength input to BoundsCheck or Deoptimize for
6388 // dynamic BCE. There are cases when these could be removed to produce better code.
6389 // If we ever add optimizations to do so we should allow an implicit check here
6390 // (as long as the address falls in the first page).
6391 //
6392 // As an example of such fancy optimization, we could eliminate BoundsCheck for
6393 // a = cond ? new int[1] : null;
6394 // a[0]; // The Phi does not need bounds check for either input.
6395 return false;
6396 }
6397
6398 bool IsEquivalentOf(HArrayGet* other) const {
6399 bool result = (GetDexPc() == other->GetDexPc());
6400 if (kIsDebugBuild && result) {
6401 DCHECK_EQ(GetBlock(), other->GetBlock());
6402 DCHECK_EQ(GetArray(), other->GetArray());
6403 DCHECK_EQ(GetIndex(), other->GetIndex());
6404 if (DataType::IsIntOrLongType(GetType())) {
6405 DCHECK(DataType::IsFloatingPointType(other->GetType())) << other->GetType();
6406 } else {
6407 DCHECK(DataType::IsFloatingPointType(GetType())) << GetType();
6408 DCHECK(DataType::IsIntOrLongType(other->GetType())) << other->GetType();
6409 }
6410 }
6411 return result;
6412 }
6413
6414 bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }
6415
6416 HInstruction* GetArray() const { return InputAt(0); }
6417 HInstruction* GetIndex() const { return InputAt(1); }
6418
6419 void SetType(DataType::Type new_type) {
6420 DCHECK(DataType::IsIntegralType(GetType()));
6421 DCHECK(DataType::IsIntegralType(new_type));
6422 DCHECK_EQ(DataType::Size(GetType()), DataType::Size(new_type));
6423 SetPackedField<TypeField>(new_type);
6424 }
6425
6426 DECLARE_INSTRUCTION(ArrayGet);
6427
6428 protected:
6429 DEFAULT_COPY_CONSTRUCTOR(ArrayGet);
6430
6431 private:
6432 // We treat a String as an array, creating the HArrayGet from String.charAt()
6433 // intrinsic in the instruction simplifier. We can always determine whether
6434 // a particular HArrayGet is actually a String.charAt() by looking at the type
6435 // of the input but that requires holding the mutator lock, so we prefer to use
6436 // a flag, so that code generators don't need to do the locking.
6437 static constexpr size_t kFlagIsStringCharAt = kNumberOfGenericPackedBits;
6438 static constexpr size_t kNumberOfArrayGetPackedBits = kFlagIsStringCharAt + 1;
6439 static_assert(kNumberOfArrayGetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6440 "Too many packed fields.");
6441 };
6442
6443 class HArraySet final : public HExpression<3> {
6444 public:
6445 HArraySet(HInstruction* array,
6446 HInstruction* index,
6447 HInstruction* value,
6448 DataType::Type expected_component_type,
6449 uint32_t dex_pc)
6450 : HArraySet(array,
6451 index,
6452 value,
6453 expected_component_type,
6454 // Make a best guess for the side effects now; this may be refined during SSA building.
6455 ComputeSideEffects(GetComponentType(value->GetType(), expected_component_type)),
6456 dex_pc) {
6457 }
6458
6459 HArraySet(HInstruction* array,
6460 HInstruction* index,
6461 HInstruction* value,
6462 DataType::Type expected_component_type,
6463 SideEffects side_effects,
6464 uint32_t dex_pc)
6465 : HExpression(kArraySet, side_effects, dex_pc) {
6466 SetPackedField<ExpectedComponentTypeField>(expected_component_type);
6467 SetPackedFlag<kFlagNeedsTypeCheck>(value->GetType() == DataType::Type::kReference);
6468 SetPackedFlag<kFlagValueCanBeNull>(true);
6469 SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(false);
6470 SetPackedField<WriteBarrierKindField>(WriteBarrierKind::kEmitNotBeingReliedOn);
6471 SetRawInputAt(0, array);
6472 SetRawInputAt(1, index);
6473 SetRawInputAt(2, value);
6474 }
6475
6476 bool IsClonable() const override { return true; }
6477
6478 bool NeedsEnvironment() const override {
6479 // We call a runtime method to throw ArrayStoreException.
6480 return NeedsTypeCheck();
6481 }
6482
6483 // Can throw ArrayStoreException.
6484 bool CanThrow() const override { return NeedsTypeCheck(); }
6485
6486 bool CanDoImplicitNullCheckOn([[maybe_unused]] HInstruction* obj) const override {
6487 // TODO: Same as for ArrayGet.
6488 return false;
6489 }
6490
6491 void ClearTypeCheck() {
6492 SetPackedFlag<kFlagNeedsTypeCheck>(false);
6493 // Clear the `CanTriggerGC` flag too as we can only trigger a GC when doing a type check.
6494 SetSideEffects(GetSideEffects().Exclusion(SideEffects::CanTriggerGC()));
6495 // Clear the environment too as we can only throw if we need a type check.
6496 RemoveEnvironment();
6497 }
6498
6499 void ClearValueCanBeNull() {
6500 SetPackedFlag<kFlagValueCanBeNull>(false);
6501 }
6502
6503 void SetStaticTypeOfArrayIsObjectArray() {
6504 SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(true);
6505 }
6506
6507 bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
6508 bool NeedsTypeCheck() const { return GetPackedFlag<kFlagNeedsTypeCheck>(); }
6509 bool StaticTypeOfArrayIsObjectArray() const {
6510 return GetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>();
6511 }
6512
6513 HInstruction* GetArray() const { return InputAt(0); }
6514 HInstruction* GetIndex() const { return InputAt(1); }
6515 HInstruction* GetValue() const { return InputAt(2); }
6516
6517 DataType::Type GetComponentType() const {
6518 return GetComponentType(GetValue()->GetType(), GetRawExpectedComponentType());
6519 }
6520
6521 static DataType::Type GetComponentType(DataType::Type value_type,
6522 DataType::Type expected_component_type) {
6523 // The Dex format does not type floating point index operations. Since the
6524 // `expected_component_type` comes from SSA building and might therefore not
6525 // be correct, we also check the value type. If it is a floating-point
6526 // type, we must use that type.
6527 return ((value_type == DataType::Type::kFloat32) || (value_type == DataType::Type::kFloat64))
6528 ? value_type
6529 : expected_component_type;
6530 }
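  // Illustrative note (editor's sketch, not in the original source): e.g.
  // GetComponentType(kFloat32, kInt32) returns kFloat32, because a float value being stored
  // shows the array holds floats even if SSA building guessed an integral component type.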
6531
6532 DataType::Type GetRawExpectedComponentType() const {
6533 return GetPackedField<ExpectedComponentTypeField>();
6534 }
6535
6536 static SideEffects ComputeSideEffects(DataType::Type type) {
6537 return SideEffects::ArrayWriteOfType(type).Union(SideEffectsForArchRuntimeCalls(type));
6538 }
6539
6540 static SideEffects SideEffectsForArchRuntimeCalls(DataType::Type value_type) {
6541 return (value_type == DataType::Type::kReference) ? SideEffects::CanTriggerGC()
6542 : SideEffects::None();
6543 }
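  // Illustrative note (editor's assumption): a reference store may need a runtime type check
  // (the ArrayStoreException path), and that runtime call can allocate and thus trigger GC,
  // which is why only kReference adds SideEffects::CanTriggerGC() above.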
6544
6545 WriteBarrierKind GetWriteBarrierKind() { return GetPackedField<WriteBarrierKindField>(); }
6546
6547 void SetWriteBarrierKind(WriteBarrierKind kind) {
6548 DCHECK(kind != WriteBarrierKind::kEmitNotBeingReliedOn)
6549 << "We shouldn't go back to the original value.";
6550 DCHECK_IMPLIES(kind == WriteBarrierKind::kDontEmit,
6551 GetWriteBarrierKind() != WriteBarrierKind::kEmitBeingReliedOn)
6552 << "If a write barrier was relied on by other write barriers, we cannot skip emitting it.";
6553 SetPackedField<WriteBarrierKindField>(kind);
6554 }
6555
6556 DECLARE_INSTRUCTION(ArraySet);
6557
6558 protected:
6559 DEFAULT_COPY_CONSTRUCTOR(ArraySet);
6560
6561 private:
6562 static constexpr size_t kFieldExpectedComponentType = kNumberOfGenericPackedBits;
6563 static constexpr size_t kFieldExpectedComponentTypeSize =
6564 MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
6565 static constexpr size_t kFlagNeedsTypeCheck =
6566 kFieldExpectedComponentType + kFieldExpectedComponentTypeSize;
6567 static constexpr size_t kFlagValueCanBeNull = kFlagNeedsTypeCheck + 1;
6568 // Cached information for the reference_type_info_ so that codegen
6569 // does not need to inspect the static type.
6570 static constexpr size_t kFlagStaticTypeOfArrayIsObjectArray = kFlagValueCanBeNull + 1;
6571 static constexpr size_t kWriteBarrierKind = kFlagStaticTypeOfArrayIsObjectArray + 1;
6572 static constexpr size_t kWriteBarrierKindSize =
6573 MinimumBitsToStore(static_cast<size_t>(WriteBarrierKind::kLast));
6574 static constexpr size_t kNumberOfArraySetPackedBits = kWriteBarrierKind + kWriteBarrierKindSize;
6575 static_assert(kNumberOfArraySetPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
6576 using ExpectedComponentTypeField =
6577 BitField<DataType::Type, kFieldExpectedComponentType, kFieldExpectedComponentTypeSize>;
6578
6579 using WriteBarrierKindField =
6580 BitField<WriteBarrierKind, kWriteBarrierKind, kWriteBarrierKindSize>;
6581 };
6582
6583 class HArrayLength final : public HExpression<1> {
6584 public:
6585 HArrayLength(HInstruction* array, uint32_t dex_pc, bool is_string_length = false)
6586 : HExpression(kArrayLength, DataType::Type::kInt32, SideEffects::None(), dex_pc) {
6587 SetPackedFlag<kFlagIsStringLength>(is_string_length);
6588 // Note that arrays do not change length, so the instruction does not
6589 // depend on any write.
6590 SetRawInputAt(0, array);
6591 }
6592
6593 bool IsClonable() const override { return true; }
6594 bool CanBeMoved() const override { return true; }
6595 bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
6596 return true;
6597 }
6598 bool CanDoImplicitNullCheckOn(HInstruction* obj) const override {
6599 return obj == InputAt(0);
6600 }
6601
6602 bool IsStringLength() const { return GetPackedFlag<kFlagIsStringLength>(); }
6603
6604 DECLARE_INSTRUCTION(ArrayLength);
6605
6606 protected:
6607 DEFAULT_COPY_CONSTRUCTOR(ArrayLength);
6608
6609 private:
6610 // We treat a String as an array, creating the HArrayLength from String.length()
6611 // or String.isEmpty() intrinsic in the instruction simplifier. We can always
6612 // determine whether a particular HArrayLength is actually a String.length() by
6613 // looking at the type of the input but that requires holding the mutator lock, so
6614 // we prefer to use a flag, so that code generators don't need to do the locking.
6615 static constexpr size_t kFlagIsStringLength = kNumberOfGenericPackedBits;
6616 static constexpr size_t kNumberOfArrayLengthPackedBits = kFlagIsStringLength + 1;
6617 static_assert(kNumberOfArrayLengthPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6618 "Too many packed fields.");
6619 };
6620
6621 class HBoundsCheck final : public HExpression<2> {
6622 public:
6623 // `HBoundsCheck` can trigger GC, as it may call the `IndexOutOfBoundsException`
6624 // constructor. However, it can only do so on a fatal slow path, so execution never returns to the
6625 // instruction following the current one; thus 'SideEffects::None()' is used.
6626 HBoundsCheck(HInstruction* index,
6627 HInstruction* length,
6628 uint32_t dex_pc,
6629 bool is_string_char_at = false)
6630 : HExpression(kBoundsCheck, index->GetType(), SideEffects::None(), dex_pc) {
6631 DCHECK_EQ(DataType::Type::kInt32, DataType::Kind(index->GetType()));
6632 SetPackedFlag<kFlagIsStringCharAt>(is_string_char_at);
6633 SetRawInputAt(0, index);
6634 SetRawInputAt(1, length);
6635 }
6636
6637 bool IsClonable() const override { return true; }
6638 bool CanBeMoved() const override { return true; }
6639 bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
6640 return true;
6641 }
6642
6643 bool NeedsEnvironment() const override { return true; }
6644
6645 bool CanThrow() const override { return true; }
6646
6647 bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }
6648
6649 HInstruction* GetIndex() const { return InputAt(0); }
6650
6651 DECLARE_INSTRUCTION(BoundsCheck);
6652
6653 protected:
6654 DEFAULT_COPY_CONSTRUCTOR(BoundsCheck);
6655
6656 private:
6657 static constexpr size_t kFlagIsStringCharAt = kNumberOfGenericPackedBits;
6658 static constexpr size_t kNumberOfBoundsCheckPackedBits = kFlagIsStringCharAt + 1;
6659 static_assert(kNumberOfBoundsCheckPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6660 "Too many packed fields.");
6661 };
6662
6663 class HSuspendCheck final : public HExpression<0> {
6664 public:
6665 explicit HSuspendCheck(uint32_t dex_pc = kNoDexPc, bool is_no_op = false)
6666 : HExpression(kSuspendCheck, SideEffects::CanTriggerGC(), dex_pc),
6667 slow_path_(nullptr) {
6668 SetPackedFlag<kFlagIsNoOp>(is_no_op);
6669 }
6670
6671 bool IsClonable() const override { return true; }
6672
6673 bool NeedsEnvironment() const override {
6674 return true;
6675 }
6676
6677 void SetIsNoOp(bool is_no_op) { SetPackedFlag<kFlagIsNoOp>(is_no_op); }
6678 bool IsNoOp() const { return GetPackedFlag<kFlagIsNoOp>(); }
6679
6680
6681 void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; }
6682 SlowPathCode* GetSlowPath() const { return slow_path_; }
6683
6684 DECLARE_INSTRUCTION(SuspendCheck);
6685
6686 protected:
6687 DEFAULT_COPY_CONSTRUCTOR(SuspendCheck);
6688
6689 // True if the HSuspendCheck should not emit any code during codegen. It is
6690 // not possible to simply remove this instruction to disable codegen, as
6691 // other optimizations (e.g. CHAGuardVisitor::HoistGuard) depend on
6692 // HSuspendCheck being present in every loop.
6693 static constexpr size_t kFlagIsNoOp = kNumberOfGenericPackedBits;
6694 static constexpr size_t kNumberOfSuspendCheckPackedBits = kFlagIsNoOp + 1;
6695 static_assert(kNumberOfSuspendCheckPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6696 "Too many packed fields.");
6697
6698 private:
6699 // Only used for code generation, in order to share the same slow path between back edges
6700 // of a same loop.
6701 SlowPathCode* slow_path_;
6702 };
6703
6704 // Pseudo-instruction which doesn't generate any code.
6705 // If `needs_environment` is true, it can be used to generate an environment. It is used, for
6706 // example, to provide the native debugger with mapping information. It ensures that we can
6707 // generate line number and local variable information at this point.
6708 class HNop : public HExpression<0> {
6709 public:
6710 explicit HNop(uint32_t dex_pc, bool needs_environment)
6711 : HExpression<0>(kNop, SideEffects::None(), dex_pc), needs_environment_(needs_environment) {
6712 }
6713
6714 bool NeedsEnvironment() const override {
6715 return needs_environment_;
6716 }
6717
6718 DECLARE_INSTRUCTION(Nop);
6719
6720 protected:
6721 DEFAULT_COPY_CONSTRUCTOR(Nop);
6722
6723 private:
6724 bool needs_environment_;
6725 };
6726
6727 /**
6728 * Instruction to load a Class object.
6729 */
6730 class HLoadClass final : public HInstruction {
6731 public:
6732 // Determines how to load the Class.
6733 enum class LoadKind {
6734 // We cannot load this class. See HSharpening::SharpenLoadClass.
6735 kInvalid = -1,
6736
6737 // Use the Class* from the method's own ArtMethod*.
6738 kReferrersClass,
6739
6740 // Use PC-relative boot image Class* address that will be known at link time.
6741 // Used for boot image classes referenced by boot image code.
6742 kBootImageLinkTimePcRelative,
6743
6744 // Load from a boot image entry in the .data.img.rel.ro using a PC-relative load.
6745 // Used for boot image classes referenced by apps in AOT-compiled code.
6746 kBootImageRelRo,
6747
6748 // Load from an app image entry in the .data.img.rel.ro using a PC-relative load.
6749 // Used for app image classes referenced by apps in AOT-compiled code.
6750 kAppImageRelRo,
6751
6752 // Load from an entry in the .bss section using a PC-relative load.
6753 // Used for classes outside boot image referenced by AOT-compiled app and boot image code.
6754 kBssEntry,
6755
6756 // Load from an entry for public class in the .bss section using a PC-relative load.
6757 // Used for classes that were unresolved during AOT-compilation outside the literal
6758 // package of the compiling class. Such classes are accessible only if they are public
6759 // and the .bss entry shall therefore be filled only if the resolved class is public.
6760 kBssEntryPublic,
6761
6762 // Load from an entry for package class in the .bss section using a PC-relative load.
6763 // Used for classes that were unresolved during AOT-compilation but within the literal
6764 // package of the compiling class. Such classes are accessible if they are public or
6765 // in the same package which, given the literal package match, requires only matching
6766 // defining class loader and the .bss entry shall therefore be filled only if at least
6767 // one of those conditions holds. Note that all code in an oat file belongs to classes
6768 // with the same defining class loader.
6769 kBssEntryPackage,
6770
6771 // Use a known boot image Class* address, embedded in the code by the codegen.
6772 // Used for boot image classes referenced by apps in JIT-compiled code.
6773 kJitBootImageAddress,
6774
6775 // Load from the root table associated with the JIT compiled method.
6776 kJitTableAddress,
6777
6778 // Load using a simple runtime call. This is the fall-back load kind when
6779 // the codegen is unable to use another appropriate kind.
6780 kRuntimeCall,
6781
6782 kLast = kRuntimeCall
6783 };
6784
6785 HLoadClass(HCurrentMethod* current_method,
6786 dex::TypeIndex type_index,
6787 const DexFile& dex_file,
6788 Handle<mirror::Class> klass,
6789 bool is_referrers_class,
6790 uint32_t dex_pc,
6791 bool needs_access_check)
6792 : HInstruction(kLoadClass,
6793 DataType::Type::kReference,
6794 SideEffectsForArchRuntimeCalls(),
6795 dex_pc),
6796 special_input_(HUserRecord<HInstruction*>(current_method)),
6797 type_index_(type_index),
6798 dex_file_(dex_file),
6799 klass_(klass) {
6800 // The referrer's class should not need an access check. We never inline unverified
6801 // methods, so we can't possibly end up in this situation.
6802 DCHECK_IMPLIES(is_referrers_class, !needs_access_check);
6803
6804 SetPackedField<LoadKindField>(
6805 is_referrers_class ? LoadKind::kReferrersClass : LoadKind::kRuntimeCall);
6806 SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
6807 SetPackedFlag<kFlagIsInImage>(false);
6808 SetPackedFlag<kFlagGenerateClInitCheck>(false);
6809 SetPackedFlag<kFlagValidLoadedClassRTI>(false);
6810 }
6811
6812 bool IsClonable() const override { return true; }
6813
6814 void SetLoadKind(LoadKind load_kind);
6815
6816 LoadKind GetLoadKind() const {
6817 return GetPackedField<LoadKindField>();
6818 }
6819
6820 bool HasPcRelativeLoadKind() const {
6821 return GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
6822 GetLoadKind() == LoadKind::kBootImageRelRo ||
6823 GetLoadKind() == LoadKind::kAppImageRelRo ||
6824 GetLoadKind() == LoadKind::kBssEntry ||
6825 GetLoadKind() == LoadKind::kBssEntryPublic ||
6826 GetLoadKind() == LoadKind::kBssEntryPackage;
6827 }
6828
6829 bool CanBeMoved() const override { return true; }
6830
6831 bool InstructionDataEquals(const HInstruction* other) const override;
6832
6833 size_t ComputeHashCode() const override { return type_index_.index_; }
6834
6835 bool CanBeNull() const override { return false; }
6836
6837 bool NeedsEnvironment() const override {
6838 return CanCallRuntime();
6839 }
6840 bool NeedsBss() const override {
6841 LoadKind load_kind = GetLoadKind();
6842 return load_kind == LoadKind::kBssEntry ||
6843 load_kind == LoadKind::kBssEntryPublic ||
6844 load_kind == LoadKind::kBssEntryPackage;
6845 }
6846
6847 void SetMustGenerateClinitCheck(bool generate_clinit_check) {
6848 SetPackedFlag<kFlagGenerateClInitCheck>(generate_clinit_check);
6849 }
6850
6851 bool CanCallRuntime() const {
6852 return NeedsAccessCheck() ||
6853 MustGenerateClinitCheck() ||
6854 NeedsBss() ||
6855 GetLoadKind() == LoadKind::kRuntimeCall;
6856 }
6857
6858 bool CanThrow() const override {
6859 return NeedsAccessCheck() ||
6860 MustGenerateClinitCheck() ||
6861 // If the class is in the boot or app image, the lookup in the runtime call cannot throw.
6862 ((GetLoadKind() == LoadKind::kRuntimeCall || NeedsBss()) && !IsInImage());
6863 }
6864
6865 ReferenceTypeInfo GetLoadedClassRTI() {
6866 if (GetPackedFlag<kFlagValidLoadedClassRTI>()) {
6867 // Note: The is_exact flag from the return value should not be used.
6868 return ReferenceTypeInfo::CreateUnchecked(klass_, /* is_exact= */ true);
6869 } else {
6870 return ReferenceTypeInfo::CreateInvalid();
6871 }
6872 }
6873
6874 // Loaded class RTI is marked as valid by RTP if the klass_ is admissible.
6875 void SetValidLoadedClassRTI() {
6876 DCHECK(klass_ != nullptr);
6877 SetPackedFlag<kFlagValidLoadedClassRTI>(true);
6878 }
6879
6880 dex::TypeIndex GetTypeIndex() const { return type_index_; }
6881 const DexFile& GetDexFile() const { return dex_file_; }
6882
6883 static SideEffects SideEffectsForArchRuntimeCalls() {
6884 return SideEffects::CanTriggerGC();
6885 }
6886
6887 bool IsReferrersClass() const { return GetLoadKind() == LoadKind::kReferrersClass; }
6888 bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }
6889 bool IsInImage() const { return GetPackedFlag<kFlagIsInImage>(); }
6890 bool MustGenerateClinitCheck() const { return GetPackedFlag<kFlagGenerateClInitCheck>(); }
6891
6892 bool MustResolveTypeOnSlowPath() const {
6893 // Check that this instruction has a slow path.
6894 LoadKind load_kind = GetLoadKind();
6895 DCHECK(load_kind != LoadKind::kRuntimeCall); // kRuntimeCall calls on main path.
6896 bool must_resolve_type_on_slow_path =
6897 load_kind == LoadKind::kBssEntry ||
6898 load_kind == LoadKind::kBssEntryPublic ||
6899 load_kind == LoadKind::kBssEntryPackage;
6900 DCHECK(must_resolve_type_on_slow_path || MustGenerateClinitCheck());
6901 return must_resolve_type_on_slow_path;
6902 }
6903
6904 void MarkInImage() {
6905 SetPackedFlag<kFlagIsInImage>(true);
6906 }
6907
6908 void AddSpecialInput(HInstruction* special_input);
6909
6910 using HInstruction::GetInputRecords; // Keep the const version visible.
6911 ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
6912 return ArrayRef<HUserRecord<HInstruction*>>(
6913 &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
6914 }
6915
6916 Handle<mirror::Class> GetClass() const {
6917 return klass_;
6918 }
6919
6920 DECLARE_INSTRUCTION(LoadClass);
6921
6922 protected:
6923 DEFAULT_COPY_CONSTRUCTOR(LoadClass);
6924
6925 private:
6926 static constexpr size_t kFlagNeedsAccessCheck = kNumberOfGenericPackedBits;
6927 // Whether the type is in an image (boot image or app image).
6928 static constexpr size_t kFlagIsInImage = kFlagNeedsAccessCheck + 1;
6929 // Whether this instruction must generate the initialization check.
6930 // Used for code generation.
6931 static constexpr size_t kFlagGenerateClInitCheck = kFlagIsInImage + 1;
6932 static constexpr size_t kFieldLoadKind = kFlagGenerateClInitCheck + 1;
6933 static constexpr size_t kFieldLoadKindSize =
6934 MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
6935 static constexpr size_t kFlagValidLoadedClassRTI = kFieldLoadKind + kFieldLoadKindSize;
6936 static constexpr size_t kNumberOfLoadClassPackedBits = kFlagValidLoadedClassRTI + 1;
6937 static_assert(kNumberOfLoadClassPackedBits < kMaxNumberOfPackedBits, "Too many packed fields.");
6938 using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;
6939
6940 static bool HasTypeReference(LoadKind load_kind) {
6941 return load_kind == LoadKind::kReferrersClass ||
6942 load_kind == LoadKind::kBootImageLinkTimePcRelative ||
6943 load_kind == LoadKind::kAppImageRelRo ||
6944 load_kind == LoadKind::kBssEntry ||
6945 load_kind == LoadKind::kBssEntryPublic ||
6946 load_kind == LoadKind::kBssEntryPackage ||
6947 load_kind == LoadKind::kRuntimeCall;
6948 }
6949
6950 void SetLoadKindInternal(LoadKind load_kind);
6951
6952 // The special input is the HCurrentMethod for kRuntimeCall or kReferrersClass.
6953 // For other load kinds it's empty or possibly some architecture-specific instruction
6954 // for PC-relative loads, i.e. kBssEntry* or kBootImageLinkTimePcRelative.
6955 HUserRecord<HInstruction*> special_input_;
6956
6957 // A type index and a dex file where the class can be accessed. The dex file can be:
6958 // - The compiling method's dex file if the class is defined there too.
6959 // - The compiling method's dex file if the class is referenced there.
6960 // - The dex file where the class is defined. When the load kind can only be
6961 // kBssEntry* or kRuntimeCall, we cannot emit code for this `HLoadClass`.
6962 const dex::TypeIndex type_index_;
6963 const DexFile& dex_file_;
6964
6965 Handle<mirror::Class> klass_;
6966 };
6967 std::ostream& operator<<(std::ostream& os, HLoadClass::LoadKind rhs);
6968
6969 // Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
6970 inline void HLoadClass::SetLoadKind(LoadKind load_kind) {
6971 // The load kind should be determined before inserting the instruction to the graph.
6972 DCHECK(GetBlock() == nullptr);
6973 DCHECK(GetEnvironment() == nullptr);
6974 SetPackedField<LoadKindField>(load_kind);
6975 if (load_kind != LoadKind::kRuntimeCall && load_kind != LoadKind::kReferrersClass) {
6976 special_input_ = HUserRecord<HInstruction*>(nullptr);
6977 }
6978 if (!NeedsEnvironment()) {
6979 SetSideEffects(SideEffects::None());
6980 }
6981 }
6982
6983 // Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
6984 inline void HLoadClass::AddSpecialInput(HInstruction* special_input) {
6985 // The special input is used for PC-relative loads on some architectures,
6986 // including literal pool loads, which are PC-relative too.
6987 DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
6988 GetLoadKind() == LoadKind::kBootImageRelRo ||
6989 GetLoadKind() == LoadKind::kAppImageRelRo ||
6990 GetLoadKind() == LoadKind::kBssEntry ||
6991 GetLoadKind() == LoadKind::kBssEntryPublic ||
6992 GetLoadKind() == LoadKind::kBssEntryPackage ||
6993 GetLoadKind() == LoadKind::kJitBootImageAddress) << GetLoadKind();
6994 DCHECK(special_input_.GetInstruction() == nullptr);
6995 special_input_ = HUserRecord<HInstruction*>(special_input);
6996 special_input->AddUseAt(this, 0);
6997 }
6998
6999 class HLoadString final : public HInstruction {
7000 public:
7001 // Determines how to load the String.
7002 enum class LoadKind {
7003 // Use PC-relative boot image String* address that will be known at link time.
7004 // Used for boot image strings referenced by boot image code.
7005 kBootImageLinkTimePcRelative,
7006
7007 // Load from a boot image entry in the .data.img.rel.ro using a PC-relative load.
7008 // Used for boot image strings referenced by apps in AOT-compiled code.
7009 kBootImageRelRo,
7010
7011 // Load from an entry in the .bss section using a PC-relative load.
7012 // Used for strings outside boot image referenced by AOT-compiled app and boot image code.
7013 kBssEntry,
7014
7015 // Use a known boot image String* address, embedded in the code by the codegen.
7016 // Used for boot image strings referenced by apps in JIT-compiled code.
7017 kJitBootImageAddress,
7018
7019 // Load from the root table associated with the JIT compiled method.
7020 kJitTableAddress,
7021
7022 // Load using a simple runtime call. This is the fall-back load kind when
7023 // the codegen is unable to use another appropriate kind.
7024 kRuntimeCall,
7025
7026 kLast = kRuntimeCall,
7027 };
7028
7029 HLoadString(HCurrentMethod* current_method,
7030 dex::StringIndex string_index,
7031 const DexFile& dex_file,
7032 uint32_t dex_pc)
7033 : HInstruction(kLoadString,
7034 DataType::Type::kReference,
7035 SideEffectsForArchRuntimeCalls(),
7036 dex_pc),
7037 special_input_(HUserRecord<HInstruction*>(current_method)),
7038 string_index_(string_index),
7039 dex_file_(dex_file) {
7040 SetPackedField<LoadKindField>(LoadKind::kRuntimeCall);
7041 }
7042
7043 bool IsClonable() const override { return true; }
7044 bool NeedsBss() const override {
7045 return GetLoadKind() == LoadKind::kBssEntry;
7046 }
7047
7048 void SetLoadKind(LoadKind load_kind);
7049
7050 LoadKind GetLoadKind() const {
7051 return GetPackedField<LoadKindField>();
7052 }
7053
7054 bool HasPcRelativeLoadKind() const {
7055 return GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
7056 GetLoadKind() == LoadKind::kBootImageRelRo ||
7057 GetLoadKind() == LoadKind::kBssEntry;
7058 }
7059
7060 const DexFile& GetDexFile() const {
7061 return dex_file_;
7062 }
7063
7064 dex::StringIndex GetStringIndex() const {
7065 return string_index_;
7066 }
7067
7068 Handle<mirror::String> GetString() const {
7069 return string_;
7070 }
7071
7072 void SetString(Handle<mirror::String> str) {
7073 string_ = str;
7074 }
7075
7076 bool CanBeMoved() const override { return true; }
7077
7078 bool InstructionDataEquals(const HInstruction* other) const override;
7079
7080 size_t ComputeHashCode() const override { return string_index_.index_; }
7081
7082 // Will call the runtime if we need to load the string through
7083 // the dex cache and the string is not guaranteed to be there yet.
7084 bool NeedsEnvironment() const override {
7085 LoadKind load_kind = GetLoadKind();
7086 if (load_kind == LoadKind::kBootImageLinkTimePcRelative ||
7087 load_kind == LoadKind::kBootImageRelRo ||
7088 load_kind == LoadKind::kJitBootImageAddress ||
7089 load_kind == LoadKind::kJitTableAddress) {
7090 return false;
7091 }
7092 return true;
7093 }
7094
7095 bool CanBeNull() const override { return false; }
7096 bool CanThrow() const override { return NeedsEnvironment(); }
7097
7098 static SideEffects SideEffectsForArchRuntimeCalls() {
7099 return SideEffects::CanTriggerGC();
7100 }
7101
7102 void AddSpecialInput(HInstruction* special_input);
7103
7104 using HInstruction::GetInputRecords; // Keep the const version visible.
7105 ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
7106 return ArrayRef<HUserRecord<HInstruction*>>(
7107 &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
7108 }
7109
7110 DECLARE_INSTRUCTION(LoadString);
7111
7112 protected:
7113 DEFAULT_COPY_CONSTRUCTOR(LoadString);
7114
7115 private:
7116 static constexpr size_t kFieldLoadKind = kNumberOfGenericPackedBits;
7117 static constexpr size_t kFieldLoadKindSize =
7118 MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
7119 static constexpr size_t kNumberOfLoadStringPackedBits = kFieldLoadKind + kFieldLoadKindSize;
7120 static_assert(kNumberOfLoadStringPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
7121 using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;
7122
7123 void SetLoadKindInternal(LoadKind load_kind);
7124
7125 // The special input is the HCurrentMethod for kRuntimeCall.
7126 // For other load kinds it's empty or possibly some architecture-specific instruction
7127 // for PC-relative loads, i.e. kBssEntry or kBootImageLinkTimePcRelative.
7128 HUserRecord<HInstruction*> special_input_;
7129
7130 dex::StringIndex string_index_;
7131 const DexFile& dex_file_;
7132
7133 Handle<mirror::String> string_;
7134 };
7135 std::ostream& operator<<(std::ostream& os, HLoadString::LoadKind rhs);
7136
7137 // Note: defined outside class to see operator<<(., HLoadString::LoadKind).
7138 inline void HLoadString::SetLoadKind(LoadKind load_kind) {
7139 // The load kind should be determined before inserting the instruction to the graph.
7140 DCHECK(GetBlock() == nullptr);
7141 DCHECK(GetEnvironment() == nullptr);
7142 DCHECK_EQ(GetLoadKind(), LoadKind::kRuntimeCall);
7143 SetPackedField<LoadKindField>(load_kind);
7144 if (load_kind != LoadKind::kRuntimeCall) {
7145 special_input_ = HUserRecord<HInstruction*>(nullptr);
7146 }
7147 if (!NeedsEnvironment()) {
7148 SetSideEffects(SideEffects::None());
7149 }
7150 }
7151
7152 // Note: defined outside class to see operator<<(., HLoadString::LoadKind).
7153 inline void HLoadString::AddSpecialInput(HInstruction* special_input) {
7154 // The special input is used for PC-relative loads on some architectures,
7155 // including literal pool loads, which are PC-relative too.
7156 DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
7157 GetLoadKind() == LoadKind::kBootImageRelRo ||
7158 GetLoadKind() == LoadKind::kBssEntry ||
7159 GetLoadKind() == LoadKind::kJitBootImageAddress) << GetLoadKind();
7160 // HLoadString::GetInputRecords() returns an empty array at this point,
7161 // so use the GetInputRecords() from the base class to set the input record.
7162 DCHECK(special_input_.GetInstruction() == nullptr);
7163 special_input_ = HUserRecord<HInstruction*>(special_input);
7164 special_input->AddUseAt(this, 0);
7165 }
7166
7167 class HLoadMethodHandle final : public HInstruction {
7168 public:
7169 HLoadMethodHandle(HCurrentMethod* current_method,
7170 uint16_t method_handle_idx,
7171 const DexFile& dex_file,
7172 uint32_t dex_pc)
7173 : HInstruction(kLoadMethodHandle,
7174 DataType::Type::kReference,
7175 SideEffectsForArchRuntimeCalls(),
7176 dex_pc),
7177 special_input_(HUserRecord<HInstruction*>(current_method)),
7178 method_handle_idx_(method_handle_idx),
7179 dex_file_(dex_file) {
7180 }
7181
7182 using HInstruction::GetInputRecords; // Keep the const version visible.
7183 ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
7184 return ArrayRef<HUserRecord<HInstruction*>>(
7185 &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
7186 }
7187
7188 bool IsClonable() const override { return true; }
7189
7190 uint16_t GetMethodHandleIndex() const { return method_handle_idx_; }
7191
7192 const DexFile& GetDexFile() const { return dex_file_; }
7193
7194 static SideEffects SideEffectsForArchRuntimeCalls() {
7195 return SideEffects::CanTriggerGC();
7196 }
7197
7198 bool CanThrow() const override { return true; }
7199
7200 bool NeedsEnvironment() const override { return true; }
7201
7202 DECLARE_INSTRUCTION(LoadMethodHandle);
7203
7204 protected:
7205 DEFAULT_COPY_CONSTRUCTOR(LoadMethodHandle);
7206
7207 private:
7208 // The special input is the HCurrentMethod for kRuntimeCall.
7209 HUserRecord<HInstruction*> special_input_;
7210
7211 const uint16_t method_handle_idx_;
7212 const DexFile& dex_file_;
7213 };
7214
7215 class HLoadMethodType final : public HInstruction {
7216 public:
7217 // Determines how to load the MethodType.
7218 enum class LoadKind {
7219 // Load from an entry in the .bss section using a PC-relative load.
7220 kBssEntry,
7221 // Load using a single runtime call.
7222 kRuntimeCall,
7223
7224 kLast = kRuntimeCall,
7225 };
7226
7227 HLoadMethodType(HCurrentMethod* current_method,
7228 dex::ProtoIndex proto_index,
7229 const DexFile& dex_file,
7230 uint32_t dex_pc)
7231 : HInstruction(kLoadMethodType,
7232 DataType::Type::kReference,
7233 SideEffectsForArchRuntimeCalls(),
7234 dex_pc),
7235 special_input_(HUserRecord<HInstruction*>(current_method)),
7236 proto_index_(proto_index),
7237 dex_file_(dex_file) {
7238 SetPackedField<LoadKindField>(LoadKind::kRuntimeCall);
7239 }
7240
7241 using HInstruction::GetInputRecords; // Keep the const version visible.
7242 ArrayRef<HUserRecord<HInstruction*>> GetInputRecords() final {
7243 return ArrayRef<HUserRecord<HInstruction*>>(
7244 &special_input_, (special_input_.GetInstruction() != nullptr) ? 1u : 0u);
7245 }
7246
7247 bool IsClonable() const override { return true; }
7248
7249 void SetLoadKind(LoadKind load_kind);
7250
7251 LoadKind GetLoadKind() const {
7252 return GetPackedField<LoadKindField>();
7253 }
7254
7255 dex::ProtoIndex GetProtoIndex() const { return proto_index_; }
7256
7257 const DexFile& GetDexFile() const { return dex_file_; }
7258
7259 static SideEffects SideEffectsForArchRuntimeCalls() {
7260 return SideEffects::CanTriggerGC();
7261 }
7262
7263 bool CanThrow() const override { return true; }
7264
7265 bool NeedsEnvironment() const override { return true; }
7266
7267 DECLARE_INSTRUCTION(LoadMethodType);
7268
7269 protected:
7270 DEFAULT_COPY_CONSTRUCTOR(LoadMethodType);
7271
7272 private:
7273 static constexpr size_t kFieldLoadKind = kNumberOfGenericPackedBits;
7274 static constexpr size_t kFieldLoadKindSize =
7275 MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
7276 static constexpr size_t kNumberOfLoadMethodTypePackedBits = kFieldLoadKind + kFieldLoadKindSize;
7277 static_assert(kNumberOfLoadMethodTypePackedBits <= kMaxNumberOfPackedBits,
7278 "Too many packed fields.");
7279 using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;
7280
7281 // The special input is the HCurrentMethod for kRuntimeCall.
7282 HUserRecord<HInstruction*> special_input_;
7283
7284 const dex::ProtoIndex proto_index_;
7285 const DexFile& dex_file_;
7286 };
7287
7288 std::ostream& operator<<(std::ostream& os, HLoadMethodType::LoadKind rhs);
7289
7290 // Note: defined outside class to see operator<<(., HLoadMethodType::LoadKind).
7291 inline void HLoadMethodType::SetLoadKind(LoadKind load_kind) {
7292 // The load kind should be determined before inserting the instruction to the graph.
7293 DCHECK(GetBlock() == nullptr);
7294 DCHECK(GetEnvironment() == nullptr);
7295 DCHECK_EQ(GetLoadKind(), LoadKind::kRuntimeCall);
7296 SetPackedField<LoadKindField>(load_kind);
7297 }
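// A sketch of how HLoadMethodType's load kind is typically upgraded (illustrative only;
// which pass performs it is an assumption here, not prescribed by this header): the
// constructor defaults to LoadKind::kRuntimeCall, and a later compiler pass may call
//
//   load_method_type->SetLoadKind(HLoadMethodType::LoadKind::kBssEntry);
//
// but, per the DCHECKs above, only while the instruction has no block and no environment,
// i.e. before it has been inserted into the graph.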
7298
7299 /**
7300 * Performs an initialization check on its Class object input.
7301 */
7302 class HClinitCheck final : public HExpression<1> {
7303 public:
7304 HClinitCheck(HLoadClass* constant, uint32_t dex_pc)
7305 : HExpression(
7306 kClinitCheck,
7307 DataType::Type::kReference,
7308 SideEffects::AllExceptGCDependency(), // Assume write/read on all fields/arrays.
7309 dex_pc) {
7310 SetRawInputAt(0, constant);
7311 }
7312 // TODO: Make ClinitCheck clonable.
7313 bool CanBeMoved() const override { return true; }
7314 bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
7315 return true;
7316 }
7317
7318 bool NeedsEnvironment() const override {
7319 // May call runtime to initialize the class.
7320 return true;
7321 }
7322
7323 bool CanThrow() const override { return true; }
7324
7325 HLoadClass* GetLoadClass() const {
7326 DCHECK(InputAt(0)->IsLoadClass());
7327 return InputAt(0)->AsLoadClass();
7328 }
7329
7330 DECLARE_INSTRUCTION(ClinitCheck);
7331
7332
7333 protected:
7334 DEFAULT_COPY_CONSTRUCTOR(ClinitCheck);
7335 };
7336
7337 class HStaticFieldGet final : public HExpression<1> {
7338 public:
7339 HStaticFieldGet(HInstruction* cls,
7340 ArtField* field,
7341 DataType::Type field_type,
7342 MemberOffset field_offset,
7343 bool is_volatile,
7344 uint32_t field_idx,
7345 uint16_t declaring_class_def_index,
7346 const DexFile& dex_file,
7347 uint32_t dex_pc)
7348 : HExpression(kStaticFieldGet,
7349 field_type,
7350 SideEffects::FieldReadOfType(field_type, is_volatile),
7351 dex_pc),
7352 field_info_(field,
7353 field_offset,
7354 field_type,
7355 is_volatile,
7356 field_idx,
7357 declaring_class_def_index,
7358 dex_file) {
7359 SetRawInputAt(0, cls);
7360 }
7361
7362
7363 bool IsClonable() const override { return true; }
7364 bool CanBeMoved() const override { return !IsVolatile(); }
7365
7366 bool InstructionDataEquals(const HInstruction* other) const override {
7367 const HStaticFieldGet* other_get = other->AsStaticFieldGet();
7368 return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
7369 }
7370
7371 size_t ComputeHashCode() const override {
7372 return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
7373 }
7374
7375 bool IsFieldAccess() const override { return true; }
7376 const FieldInfo& GetFieldInfo() const override { return field_info_; }
7377 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
7378 DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
7379 bool IsVolatile() const { return field_info_.IsVolatile(); }
7380
7381 void SetType(DataType::Type new_type) {
7382 DCHECK(DataType::IsIntegralType(GetType()));
7383 DCHECK(DataType::IsIntegralType(new_type));
7384 DCHECK_EQ(DataType::Size(GetType()), DataType::Size(new_type));
7385 SetPackedField<TypeField>(new_type);
7386 }
7387
7388 DECLARE_INSTRUCTION(StaticFieldGet);
7389
7390 protected:
7391 DEFAULT_COPY_CONSTRUCTOR(StaticFieldGet);
7392
7393 private:
7394 const FieldInfo field_info_;
7395 };
7396
7397 class HStaticFieldSet final : public HExpression<2> {
7398 public:
7399 HStaticFieldSet(HInstruction* cls,
7400 HInstruction* value,
7401 ArtField* field,
7402 DataType::Type field_type,
7403 MemberOffset field_offset,
7404 bool is_volatile,
7405 uint32_t field_idx,
7406 uint16_t declaring_class_def_index,
7407 const DexFile& dex_file,
7408 uint32_t dex_pc)
7409 : HExpression(kStaticFieldSet,
7410 SideEffects::FieldWriteOfType(field_type, is_volatile),
7411 dex_pc),
7412 field_info_(field,
7413 field_offset,
7414 field_type,
7415 is_volatile,
7416 field_idx,
7417 declaring_class_def_index,
7418 dex_file) {
7419 SetPackedFlag<kFlagValueCanBeNull>(true);
7420 SetPackedField<WriteBarrierKindField>(WriteBarrierKind::kEmitNotBeingReliedOn);
7421 SetRawInputAt(0, cls);
7422 SetRawInputAt(1, value);
7423 }
7424
7425 bool IsClonable() const override { return true; }
7426 bool IsFieldAccess() const override { return true; }
7427 const FieldInfo& GetFieldInfo() const override { return field_info_; }
7428 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
7429 DataType::Type GetFieldType() const { return field_info_.GetFieldType(); }
7430 bool IsVolatile() const { return field_info_.IsVolatile(); }
7431
7432 HInstruction* GetValue() const { return InputAt(1); }
7433 bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
7434 void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }
7435
7436 WriteBarrierKind GetWriteBarrierKind() { return GetPackedField<WriteBarrierKindField>(); }
7437 void SetWriteBarrierKind(WriteBarrierKind kind) {
7438 DCHECK(kind != WriteBarrierKind::kEmitNotBeingReliedOn)
7439 << "We shouldn't go back to the original value.";
7440 DCHECK_IMPLIES(kind == WriteBarrierKind::kDontEmit,
7441 GetWriteBarrierKind() != WriteBarrierKind::kEmitBeingReliedOn)
7442 << "If a write barrier was relied on by other write barriers, we cannot skip emitting it.";
7443 SetPackedField<WriteBarrierKindField>(kind);
7444 }
7445
7446 DECLARE_INSTRUCTION(StaticFieldSet);
7447
7448 protected:
7449 DEFAULT_COPY_CONSTRUCTOR(StaticFieldSet);
7450
7451 private:
7452 static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
7453 static constexpr size_t kWriteBarrierKind = kFlagValueCanBeNull + 1;
7454 static constexpr size_t kWriteBarrierKindSize =
7455 MinimumBitsToStore(static_cast<size_t>(WriteBarrierKind::kLast));
7456 static constexpr size_t kNumberOfStaticFieldSetPackedBits =
7457 kWriteBarrierKind + kWriteBarrierKindSize;
7458 static_assert(kNumberOfStaticFieldSetPackedBits <= kMaxNumberOfPackedBits,
7459 "Too many packed fields.");
7460
7461 const FieldInfo field_info_;
7462 using WriteBarrierKindField =
7463 BitField<WriteBarrierKind, kWriteBarrierKind, kWriteBarrierKindSize>;
7464 };
7465
7466 class HStringBuilderAppend final : public HVariableInputSizeInstruction {
7467 public:
7468 HStringBuilderAppend(HIntConstant* format,
7469 uint32_t number_of_arguments,
7470 bool has_fp_args,
7471 ArenaAllocator* allocator,
7472 uint32_t dex_pc)
7473 : HVariableInputSizeInstruction(
7474 kStringBuilderAppend,
7475 DataType::Type::kReference,
7476 SideEffects::CanTriggerGC().Union(
7477 // The runtime call may read memory from inputs. It never writes outside
7478 // of the newly allocated result object or newly allocated helper objects,
7479 // except for float/double arguments where we reuse thread-local helper objects.
7480 has_fp_args ? SideEffects::AllWritesAndReads() : SideEffects::AllReads()),
7481 dex_pc,
7482 allocator,
7483 number_of_arguments + /* format */ 1u,
7484 kArenaAllocInvokeInputs) {
7485 DCHECK_GE(number_of_arguments, 1u); // There must be something to append.
7486 SetRawInputAt(FormatIndex(), format);
7487 }
7488
7489 void SetArgumentAt(size_t index, HInstruction* argument) {
7490 DCHECK_LT(index, GetNumberOfArguments());
7491 SetRawInputAt(index, argument);
7492 }
7493
7494 // Return the number of arguments, excluding the format.
7495 size_t GetNumberOfArguments() const {
7496 DCHECK_GE(InputCount(), 1u);
7497 return InputCount() - 1u;
7498 }
7499
7500 size_t FormatIndex() const {
7501 return GetNumberOfArguments();
7502 }
7503
7504 HIntConstant* GetFormat() {
7505 return InputAt(FormatIndex())->AsIntConstant();
7506 }
7507
7508 bool NeedsEnvironment() const override { return true; }
7509
7510 bool CanThrow() const override { return true; }
7511
7512 bool CanBeNull() const override { return false; }
7513
7514 DECLARE_INSTRUCTION(StringBuilderAppend);
7515
7516 protected:
7517 DEFAULT_COPY_CONSTRUCTOR(StringBuilderAppend);
7518 };
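// A minimal construction sketch for HStringBuilderAppend (illustrative only; `allocator`,
// `format`, `arg0` and `arg1` are assumed to exist already):
//
//   HStringBuilderAppend* append = new (allocator) HStringBuilderAppend(
//       format, /* number_of_arguments= */ 2u, /* has_fp_args= */ false, allocator, dex_pc);
//   append->SetArgumentAt(0, arg0);
//   append->SetArgumentAt(1, arg1);
//
// Inputs 0..1 then hold the appended values, and the last input, at FormatIndex(), holds
// the format constant returned by GetFormat().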
7519
7520 class HUnresolvedInstanceFieldGet final : public HExpression<1> {
7521 public:
7522 HUnresolvedInstanceFieldGet(HInstruction* obj,
7523 DataType::Type field_type,
7524 uint32_t field_index,
7525 uint32_t dex_pc)
7526 : HExpression(kUnresolvedInstanceFieldGet,
7527 field_type,
7528 SideEffects::AllExceptGCDependency(),
7529 dex_pc),
7530 field_index_(field_index) {
7531 SetRawInputAt(0, obj);
7532 }
7533
7534 bool IsClonable() const override { return true; }
7535 bool NeedsEnvironment() const override { return true; }
7536 bool CanThrow() const override { return true; }
7537
7538 DataType::Type GetFieldType() const { return GetType(); }
7539 uint32_t GetFieldIndex() const { return field_index_; }
7540
7541 DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet);
7542
7543 protected:
7544 DEFAULT_COPY_CONSTRUCTOR(UnresolvedInstanceFieldGet);
7545
7546 private:
7547 const uint32_t field_index_;
7548 };
7549
7550 class HUnresolvedInstanceFieldSet final : public HExpression<2> {
7551 public:
7552 HUnresolvedInstanceFieldSet(HInstruction* obj,
7553 HInstruction* value,
7554 DataType::Type field_type,
7555 uint32_t field_index,
7556 uint32_t dex_pc)
7557 : HExpression(kUnresolvedInstanceFieldSet, SideEffects::AllExceptGCDependency(), dex_pc),
7558 field_index_(field_index) {
7559 SetPackedField<FieldTypeField>(field_type);
7560 DCHECK_EQ(DataType::Kind(field_type), DataType::Kind(value->GetType()));
7561 SetRawInputAt(0, obj);
7562 SetRawInputAt(1, value);
7563 }
7564
7565 bool IsClonable() const override { return true; }
7566 bool NeedsEnvironment() const override { return true; }
7567 bool CanThrow() const override { return true; }
7568
7569 DataType::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
7570 uint32_t GetFieldIndex() const { return field_index_; }
7571
7572 DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet);
7573
7574 protected:
7575 DEFAULT_COPY_CONSTRUCTOR(UnresolvedInstanceFieldSet);
7576
7577 private:
7578 static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
7579 static constexpr size_t kFieldFieldTypeSize =
7580 MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
7581 static constexpr size_t kNumberOfUnresolvedInstanceFieldSetPackedBits =
7582 kFieldFieldType + kFieldFieldTypeSize;
7583 static_assert(kNumberOfUnresolvedInstanceFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
7584 "Too many packed fields.");
7585 using FieldTypeField = BitField<DataType::Type, kFieldFieldType, kFieldFieldTypeSize>;
7586
7587 const uint32_t field_index_;
7588 };
7589
7590 class HUnresolvedStaticFieldGet final : public HExpression<0> {
7591 public:
7592 HUnresolvedStaticFieldGet(DataType::Type field_type,
7593 uint32_t field_index,
7594 uint32_t dex_pc)
7595 : HExpression(kUnresolvedStaticFieldGet,
7596 field_type,
7597 SideEffects::AllExceptGCDependency(),
7598 dex_pc),
7599 field_index_(field_index) {
7600 }
7601
7602 bool IsClonable() const override { return true; }
7603 bool NeedsEnvironment() const override { return true; }
7604 bool CanThrow() const override { return true; }
7605
7606 DataType::Type GetFieldType() const { return GetType(); }
7607 uint32_t GetFieldIndex() const { return field_index_; }
7608
7609 DECLARE_INSTRUCTION(UnresolvedStaticFieldGet);
7610
7611 protected:
7612 DEFAULT_COPY_CONSTRUCTOR(UnresolvedStaticFieldGet);
7613
7614 private:
7615 const uint32_t field_index_;
7616 };
7617
7618 class HUnresolvedStaticFieldSet final : public HExpression<1> {
7619 public:
7620 HUnresolvedStaticFieldSet(HInstruction* value,
7621 DataType::Type field_type,
7622 uint32_t field_index,
7623 uint32_t dex_pc)
7624 : HExpression(kUnresolvedStaticFieldSet, SideEffects::AllExceptGCDependency(), dex_pc),
7625 field_index_(field_index) {
7626 SetPackedField<FieldTypeField>(field_type);
7627 DCHECK_EQ(DataType::Kind(field_type), DataType::Kind(value->GetType()));
7628 SetRawInputAt(0, value);
7629 }
7630
7631 bool IsClonable() const override { return true; }
7632 bool NeedsEnvironment() const override { return true; }
7633 bool CanThrow() const override { return true; }
7634
7635 DataType::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
7636 uint32_t GetFieldIndex() const { return field_index_; }
7637
7638 DECLARE_INSTRUCTION(UnresolvedStaticFieldSet);
7639
7640 protected:
7641 DEFAULT_COPY_CONSTRUCTOR(UnresolvedStaticFieldSet);
7642
7643 private:
7644 static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
7645 static constexpr size_t kFieldFieldTypeSize =
7646 MinimumBitsToStore(static_cast<size_t>(DataType::Type::kLast));
7647 static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
7648 kFieldFieldType + kFieldFieldTypeSize;
7649 static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
7650 "Too many packed fields.");
7651 using FieldTypeField = BitField<DataType::Type, kFieldFieldType, kFieldFieldTypeSize>;
7652
7653 const uint32_t field_index_;
7654 };
7655
7656 // Implement the move-exception DEX instruction.
7657 class HLoadException final : public HExpression<0> {
7658 public:
7659 explicit HLoadException(uint32_t dex_pc = kNoDexPc)
7660 : HExpression(kLoadException, DataType::Type::kReference, SideEffects::None(), dex_pc) {
7661 }
7662
7663 bool CanBeNull() const override { return false; }
7664
7665 DECLARE_INSTRUCTION(LoadException);
7666
7667 protected:
7668 DEFAULT_COPY_CONSTRUCTOR(LoadException);
7669 };
7670
7671 // Implicit part of move-exception which clears thread-local exception storage.
7672 // Must not be removed because the runtime expects the TLS to get cleared.
7673 class HClearException final : public HExpression<0> {
7674 public:
7675 explicit HClearException(uint32_t dex_pc = kNoDexPc)
7676 : HExpression(kClearException, SideEffects::AllWrites(), dex_pc) {
7677 }
7678
7679 DECLARE_INSTRUCTION(ClearException);
7680
7681 protected:
7682 DEFAULT_COPY_CONSTRUCTOR(ClearException);
7683 };
7684
7685 class HThrow final : public HExpression<1> {
7686 public:
7687 HThrow(HInstruction* exception, uint32_t dex_pc)
7688 : HExpression(kThrow, SideEffects::CanTriggerGC(), dex_pc) {
7689 SetRawInputAt(0, exception);
7690 }
7691
7692 bool IsControlFlow() const override { return true; }
7693
7694 bool NeedsEnvironment() const override { return true; }
7695
7696 bool CanThrow() const override { return true; }
7697
7698 bool AlwaysThrows() const override { return true; }
7699
7700 DECLARE_INSTRUCTION(Throw);
7701
7702 protected:
7703 DEFAULT_COPY_CONSTRUCTOR(Throw);
7704 };
7705
7706 /**
7707 * Implementation strategies for the code generator of a HInstanceOf
7708 * or `HCheckCast`.
7709 */
7710 enum class TypeCheckKind {  // private marker to prevent generate-operator-out.py from processing it.
7711 kUnresolvedCheck, // Check against an unresolved type.
7712 kExactCheck, // Can do a single class compare.
7713 kClassHierarchyCheck, // Can just walk the super class chain.
7714 kAbstractClassCheck, // Can just walk the super class chain, starting one up.
7715 kInterfaceCheck, // No optimization yet when checking against an interface.
7716 kArrayObjectCheck, // Can just check if the array is not primitive.
7717 kArrayCheck, // No optimization yet when checking against a generic array.
7718 kBitstringCheck, // Compare the type check bitstring.
7719 kLast = kBitstringCheck
7720 };
7721
7722 std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs);
7723
7724 // Note: HTypeCheckInstruction is just a helper class, not an abstract instruction with an
7725 // `IsTypeCheckInstruction()`. (New virtual methods in the HInstruction class have a high cost.)
7726 class HTypeCheckInstruction : public HVariableInputSizeInstruction {
7727 public:
7728 HTypeCheckInstruction(InstructionKind kind,
7729 DataType::Type type,
7730 HInstruction* object,
7731 HInstruction* target_class_or_null,
7732 TypeCheckKind check_kind,
7733 Handle<mirror::Class> klass,
7734 uint32_t dex_pc,
7735 ArenaAllocator* allocator,
7736 HIntConstant* bitstring_path_to_root,
7737 HIntConstant* bitstring_mask,
7738 SideEffects side_effects)
7739 : HVariableInputSizeInstruction(
7740 kind,
7741 type,
7742 side_effects,
7743 dex_pc,
7744 allocator,
7745 /* number_of_inputs= */ check_kind == TypeCheckKind::kBitstringCheck ? 4u : 2u,
7746 kArenaAllocTypeCheckInputs),
7747 klass_(klass) {
7748 SetPackedField<TypeCheckKindField>(check_kind);
7749 SetPackedFlag<kFlagMustDoNullCheck>(true);
7750 SetPackedFlag<kFlagValidTargetClassRTI>(false);
7751 SetRawInputAt(0, object);
7752 SetRawInputAt(1, target_class_or_null);
7753 DCHECK_EQ(check_kind == TypeCheckKind::kBitstringCheck, bitstring_path_to_root != nullptr);
7754 DCHECK_EQ(check_kind == TypeCheckKind::kBitstringCheck, bitstring_mask != nullptr);
7755 if (check_kind == TypeCheckKind::kBitstringCheck) {
7756 DCHECK(target_class_or_null->IsNullConstant());
7757 SetRawInputAt(2, bitstring_path_to_root);
7758 SetRawInputAt(3, bitstring_mask);
7759 } else {
7760 DCHECK(target_class_or_null->IsLoadClass());
7761 }
7762 }
7763
7764 HLoadClass* GetTargetClass() const {
7765 DCHECK_NE(GetTypeCheckKind(), TypeCheckKind::kBitstringCheck);
7766 HInstruction* load_class = InputAt(1);
7767 DCHECK(load_class->IsLoadClass());
7768 return load_class->AsLoadClass();
7769 }
7770
7771 uint32_t GetBitstringPathToRoot() const {
7772 DCHECK_EQ(GetTypeCheckKind(), TypeCheckKind::kBitstringCheck);
7773 HInstruction* path_to_root = InputAt(2);
7774 DCHECK(path_to_root->IsIntConstant());
7775 return static_cast<uint32_t>(path_to_root->AsIntConstant()->GetValue());
7776 }
7777
7778 uint32_t GetBitstringMask() const {
7779 DCHECK_EQ(GetTypeCheckKind(), TypeCheckKind::kBitstringCheck);
7780 HInstruction* mask = InputAt(3);
7781 DCHECK(mask->IsIntConstant());
7782 return static_cast<uint32_t>(mask->AsIntConstant()->GetValue());
7783 }
7784
7785 bool IsClonable() const override { return true; }
7786 bool CanBeMoved() const override { return true; }
7787
7788 bool InstructionDataEquals(const HInstruction* other) const override {
7789 DCHECK(other->IsInstanceOf() || other->IsCheckCast()) << other->DebugName();
7790 return GetPackedFields() == down_cast<const HTypeCheckInstruction*>(other)->GetPackedFields();
7791 }
7792
7793 bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
7794 void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
7795 TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
7796 bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }
7797
7798 ReferenceTypeInfo GetTargetClassRTI() {
7799 if (GetPackedFlag<kFlagValidTargetClassRTI>()) {
7800 // Note: The is_exact flag from the return value should not be used.
7801 return ReferenceTypeInfo::CreateUnchecked(klass_, /* is_exact= */ true);
7802 } else {
7803 return ReferenceTypeInfo::CreateInvalid();
7804 }
7805 }
7806
7807 // Target class RTI is marked as valid by RTP if the klass_ is admissible.
7808 void SetValidTargetClassRTI() {
7809 DCHECK(klass_ != nullptr);
7810 SetPackedFlag<kFlagValidTargetClassRTI>(true);
7811 }
7812
7813 Handle<mirror::Class> GetClass() const {
7814 return klass_;
7815 }
7816
7817 protected:
7818 DEFAULT_COPY_CONSTRUCTOR(TypeCheckInstruction);
7819
7820 private:
7821 static constexpr size_t kFieldTypeCheckKind = kNumberOfGenericPackedBits;
7822 static constexpr size_t kFieldTypeCheckKindSize =
7823 MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
7824 static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
7825 static constexpr size_t kFlagValidTargetClassRTI = kFlagMustDoNullCheck + 1;
7826 static constexpr size_t kNumberOfInstanceOfPackedBits = kFlagValidTargetClassRTI + 1;
7827 static_assert(kNumberOfInstanceOfPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
7828 using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;
7829
7830 Handle<mirror::Class> klass_;
7831 };
7832
7833 class HInstanceOf final : public HTypeCheckInstruction {
7834 public:
7835 HInstanceOf(HInstruction* object,
7836 HInstruction* target_class_or_null,
7837 TypeCheckKind check_kind,
7838 Handle<mirror::Class> klass,
7839 uint32_t dex_pc,
7840 ArenaAllocator* allocator,
7841 HIntConstant* bitstring_path_to_root,
7842 HIntConstant* bitstring_mask)
7843 : HTypeCheckInstruction(kInstanceOf,
7844 DataType::Type::kBool,
7845 object,
7846 target_class_or_null,
7847 check_kind,
7848 klass,
7849 dex_pc,
7850 allocator,
7851 bitstring_path_to_root,
7852 bitstring_mask,
7853 SideEffectsForArchRuntimeCalls(check_kind)) {}
7854
7855 bool IsClonable() const override { return true; }
7856
7857 bool NeedsEnvironment() const override {
7858 return CanCallRuntime(GetTypeCheckKind());
7859 }
7860
7861 static bool CanCallRuntime(TypeCheckKind check_kind) {
7862 // TODO: Re-evaluate now that mips codegen has been removed.
7863 return check_kind != TypeCheckKind::kExactCheck;
7864 }
7865
7866 static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) {
7867 return CanCallRuntime(check_kind) ? SideEffects::CanTriggerGC() : SideEffects::None();
7868 }
7869
7870 DECLARE_INSTRUCTION(InstanceOf);
7871
7872 protected:
7873 DEFAULT_COPY_CONSTRUCTOR(InstanceOf);
7874 };
7875
7876 class HBoundType final : public HExpression<1> {
7877 public:
7878 explicit HBoundType(HInstruction* input, uint32_t dex_pc = kNoDexPc)
7879 : HExpression(kBoundType, DataType::Type::kReference, SideEffects::None(), dex_pc),
7880 upper_bound_(ReferenceTypeInfo::CreateInvalid()) {
7881 SetPackedFlag<kFlagUpperCanBeNull>(true);
7882 SetPackedFlag<kFlagCanBeNull>(true);
7883 DCHECK_EQ(input->GetType(), DataType::Type::kReference);
7884 SetRawInputAt(0, input);
7885 }
7886
7887 bool InstructionDataEquals(const HInstruction* other) const override;
7888 bool IsClonable() const override { return true; }
7889
7890 // {Get,Set}Upper* should only be used in reference type propagation.
7891 const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; }
7892 bool GetUpperCanBeNull() const { return GetPackedFlag<kFlagUpperCanBeNull>(); }
7893 void SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null);
7894
7895 void SetCanBeNull(bool can_be_null) {
7896 DCHECK(GetUpperCanBeNull() || !can_be_null);
7897 SetPackedFlag<kFlagCanBeNull>(can_be_null);
7898 }
7899
7900 bool CanBeNull() const override { return GetPackedFlag<kFlagCanBeNull>(); }
7901
7902 DECLARE_INSTRUCTION(BoundType);
7903
7904 protected:
7905 DEFAULT_COPY_CONSTRUCTOR(BoundType);
7906
7907 private:
7908 // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this
7909 // is false then CanBeNull() cannot be true).
7910 static constexpr size_t kFlagUpperCanBeNull = kNumberOfGenericPackedBits;
7911 static constexpr size_t kFlagCanBeNull = kFlagUpperCanBeNull + 1;
7912 static constexpr size_t kNumberOfBoundTypePackedBits = kFlagCanBeNull + 1;
7913 static_assert(kNumberOfBoundTypePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
7914
7915 // Encodes the uppermost class that this instruction can have. In other words,
7916 // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()).
7917 // It is used to bound the type in cases like:
7918 // if (x instanceof ClassX) {
7919 // upper_bound_ will be ClassX
7920 // }
7921 ReferenceTypeInfo upper_bound_;
7922 };
7923
7924 class HCheckCast final : public HTypeCheckInstruction {
7925 public:
7926 HCheckCast(HInstruction* object,
7927 HInstruction* target_class_or_null,
7928 TypeCheckKind check_kind,
7929 Handle<mirror::Class> klass,
7930 uint32_t dex_pc,
7931 ArenaAllocator* allocator,
7932 HIntConstant* bitstring_path_to_root,
7933 HIntConstant* bitstring_mask)
7934 : HTypeCheckInstruction(kCheckCast,
7935 DataType::Type::kVoid,
7936 object,
7937 target_class_or_null,
7938 check_kind,
7939 klass,
7940 dex_pc,
7941 allocator,
7942 bitstring_path_to_root,
7943 bitstring_mask,
7944 SideEffects::CanTriggerGC()) {}
7945
7946 bool IsClonable() const override { return true; }
7947 bool NeedsEnvironment() const override {
7948 // Instruction may throw a CheckCastError.
7949 return true;
7950 }
7951
7952 bool CanThrow() const override { return true; }
7953
7954 DECLARE_INSTRUCTION(CheckCast);
7955
7956 protected:
7957 DEFAULT_COPY_CONSTRUCTOR(CheckCast);
7958 };
7959
7960 /**
7961 * @brief Memory barrier types (see "The JSR-133 Cookbook for Compiler Writers").
7962 * @details We define the combined barrier types that are actually required
7963 * by the Java Memory Model, rather than using exactly the terminology from
7964 * the JSR-133 cookbook. These should, in many cases, be replaced by acquire/release
7965 * primitives. Note that the JSR-133 cookbook generally does not deal with
7966 * store atomicity issues, and the recipes there are not always entirely sufficient.
7967 * The current recipe is as follows:
7968 * -# Use AnyStore ~= (LoadStore | StoreStore) ~= release barrier before volatile store.
7969 * -# Use AnyAny barrier after volatile store. (StoreLoad is as expensive.)
7970 * -# Use LoadAny barrier ~= (LoadLoad | LoadStore) ~= acquire barrier after each volatile load.
7971 * -# Use StoreStore barrier after all stores but before return from any constructor whose
7972 * class has final fields.
7973 * -# Use NTStoreStore to order non-temporal stores with respect to all later
7974 * store-to-memory instructions. Only generated together with non-temporal stores.
7975 */
7976 enum MemBarrierKind {
7977 kAnyStore,
7978 kLoadAny,
7979 kStoreStore,
7980 kAnyAny,
7981 kNTStoreStore,
7982 kLastBarrierKind = kNTStoreStore
7983 };
7984 std::ostream& operator<<(std::ostream& os, MemBarrierKind kind);
7985
7986 class HMemoryBarrier final : public HExpression<0> {
7987 public:
7988 explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc)
7989 : HExpression(kMemoryBarrier,
7990 SideEffects::AllWritesAndReads(), // Assume write/read on all fields/arrays.
7991 dex_pc) {
7992 SetPackedField<BarrierKindField>(barrier_kind);
7993 }
7994
7995 bool IsClonable() const override { return true; }
7996
7997 MemBarrierKind GetBarrierKind() { return GetPackedField<BarrierKindField>(); }
7998
7999 DECLARE_INSTRUCTION(MemoryBarrier);
8000
8001 protected:
8002 DEFAULT_COPY_CONSTRUCTOR(MemoryBarrier);
8003
8004 private:
8005 static constexpr size_t kFieldBarrierKind = HInstruction::kNumberOfGenericPackedBits;
8006 static constexpr size_t kFieldBarrierKindSize =
8007 MinimumBitsToStore(static_cast<size_t>(kLastBarrierKind));
8008 static constexpr size_t kNumberOfMemoryBarrierPackedBits =
8009 kFieldBarrierKind + kFieldBarrierKindSize;
8010 static_assert(kNumberOfMemoryBarrierPackedBits <= kMaxNumberOfPackedBits,
8011 "Too many packed fields.");
8012 using BarrierKindField = BitField<MemBarrierKind, kFieldBarrierKind, kFieldBarrierKindSize>;
8013 };
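// A sketch of how the barrier recipe above maps onto HMemoryBarrier nodes around a volatile
// store (illustrative only; code generators may emit acquire/release forms instead):
//
//   HMemoryBarrier(kAnyStore)   // release-style barrier before the volatile store
//   HInstanceFieldSet(...)      // the volatile store itself
//   HMemoryBarrier(kAnyAny)     // full barrier after the volatile store
//
// and, per the recipe, a kLoadAny (acquire-style) barrier follows each volatile load.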
8014
8015 // A constructor fence orders all prior stores to fields that could be accessed via a final field of
8016 // the specified object(s), with respect to any subsequent store that might "publish"
8017 // (i.e. make visible) the specified object to another thread.
8018 //
8019 // JLS 17.5.1 "Semantics of final fields" states that a freeze action happens
8020 // for all final fields (that were set) at the end of the invoked constructor.
8021 //
8022 // The constructor fence models the freeze actions for the final fields of an object
8023 // being constructed (semantically at the end of the constructor). Constructor fences
8024 // have a per-object affinity; two separate objects being constructed get two separate
8025 // constructor fences.
8026 //
8027 // (Note that if calling a super-constructor or forwarding to another constructor,
8028 // the freezes would happen at the end of *that* constructor being invoked).
8029 //
8030 // The memory model guarantees that when the object being constructed is "published" after
8031 // constructor completion (i.e. escapes the current thread via a store), then any final field
8032 // writes must be observable on other threads (once they observe that publication).
8033 //
8034 // Further, anything written before the freeze, and read by dereferencing through the final field,
8035 // must also be visible (so final object field could itself have an object with non-final fields;
8036 // yet the freeze must also extend to them).
8037 //
8038 // Constructor example:
8039 //
8040 // class HasFinal {
8041 // final int field; Optimizing IR for <init>()V:
8042 // HasFinal() {
8043 // field = 123; HInstanceFieldSet(this, HasFinal.field, 123)
8044 // // freeze(this.field); HConstructorFence(this)
8045 // } HReturn
8046 // }
8047 //
8048 // HConstructorFence can serve double duty as a fence for new-instance/new-array allocations of
8049 // already-initialized classes; in that case the allocation must act as a "default-initializer"
8050 // of the object which effectively writes the class pointer "final field".
8051 //
8052 // For example, we can model default-initialization as roughly the equivalent of the following:
8053 //
8054 // class Object {
8055 // private final Class header;
8056 // }
8057 //
8058 // Java code: Optimizing IR:
8059 //
8060 // T new_instance<T>() {
8061 // Object obj = allocate_memory(T.class.size); obj = HInvoke(art_quick_alloc_object, T)
8062 // obj.header = T.class; // header write is done by above call.
8063 // // freeze(obj.header) HConstructorFence(obj)
8064 // return (T)obj;
8065 // }
8066 //
8067 // See also:
8068 // * DexCompilationUnit::RequiresConstructorBarrier
8069 // * QuasiAtomic::ThreadFenceForConstructor
8070 //
8071 class HConstructorFence final : public HVariableInputSizeInstruction {
8072 // A fence has variable inputs because the inputs can be removed
8073 // after prepare_for_register_allocation phase.
8074 // (TODO: In the future a fence could freeze multiple objects
8075 // after merging two fences together.)
8076 public:
8077 // `fence_object` is the reference that needs to be protected for correct publication.
8078 //
8079 // It makes sense in the following situations:
8080 // * <init> constructors, it's the "this" parameter (i.e. HParameterValue, s.t. IsThis() == true).
8081 // * new-instance-like instructions, it's the return value (i.e. HNewInstance).
8082 //
8083 // After construction the `fence_object` becomes the 0th input.
8084 // This is not an input in a real sense, but just a convenient place to stash the information
8085 // about the associated object.
8086 HConstructorFence(HInstruction* fence_object,
8087 uint32_t dex_pc,
8088 ArenaAllocator* allocator)
8089 // We strongly suspect there is not a more accurate way to describe the fine-grained reordering
8090 // constraints described in the class header. We claim that these SideEffects constraints
8091 // enforce a superset of the real constraints.
8092 //
8093 // The ordering described above is conservatively modeled with SideEffects as follows:
8094 //
8095 // * To prevent reordering of the publication stores:
8096 // ----> "Reads of objects" is the initial SideEffect.
8097 // * For every primitive final field store in the constructor:
8098 // ----> Union that field's type as a read (e.g. "Read of T") into the SideEffect.
8099 // * If there are any stores to reference final fields in the constructor:
8100 // ----> Use a more conservative "AllReads" SideEffect because any stores to any references
8101 // that are reachable from `fence_object` also need to be prevented for reordering
8102 // (and we do not want to do alias analysis to figure out what those stores are).
8103 //
8104 // In the implementation, this initially starts out as an "all reads" side effect; this is an
8105 // even more conservative approach than the one described above, and prevents all of the
8106 // above reordering without analyzing any of the instructions in the constructor.
8107 //
8108 // If in a later phase we discover that there are no writes to reference final fields,
8109 // we can refine the side effect to a smaller set of type reads (see above constraints).
8110 : HVariableInputSizeInstruction(kConstructorFence,
8111 SideEffects::AllReads(),
8112 dex_pc,
8113 allocator,
8114 /* number_of_inputs= */ 1,
8115 kArenaAllocConstructorFenceInputs) {
8116 DCHECK(fence_object != nullptr);
8117 SetRawInputAt(0, fence_object);
8118 }
8119
8120 // The object associated with this constructor fence.
8121 //
8122 // (Note: This will be null after the prepare_for_register_allocation phase,
8123 // as all constructor fence inputs are removed there).
8124 HInstruction* GetFenceObject() const {
8125 return InputAt(0);
8126 }
8127
8128 // Find all the HConstructorFence uses (`fence_use`) for `this` and:
8129 // - Delete `fence_use` from `this`'s use list.
8130 // - Delete `this` from `fence_use`'s inputs list.
8131 // - If the `fence_use` is dead, remove it from the graph.
8132 //
8133 // A fence is considered dead once it no longer has any uses
8134 // and all of the inputs are dead.
8135 //
8136 // This must *not* be called during/after prepare_for_register_allocation,
8137 // because that removes all the inputs to the fences but the fence is actually
8138 // still considered live.
8139 //
8140 // Returns how many HConstructorFence instructions were removed from graph.
8141 static size_t RemoveConstructorFences(HInstruction* instruction);
8142
8143 // Combine all inputs of `this` and `other` instruction and remove
8144 // `other` from the graph.
8145 //
8146 // Inputs are unique after the merge.
8147 //
8148 // Requirement: `this` must not be the same as `other`.
8149 void Merge(HConstructorFence* other);
8150
8151 // Check if this constructor fence is protecting
8152 // an HNewInstance or HNewArray that is also the immediate
8153 // predecessor of `this`.
8154 //
8155 // If `ignore_inputs` is true, then the immediate predecessor doesn't need
8156 // to be one of the inputs of `this`.
8157 //
8158 // Returns the associated HNewArray or HNewInstance,
8159 // or null otherwise.
8160 HInstruction* GetAssociatedAllocation(bool ignore_inputs = false);
8161
8162 DECLARE_INSTRUCTION(ConstructorFence);
8163
8164 protected:
8165 DEFAULT_COPY_CONSTRUCTOR(ConstructorFence);
8166 };
8167
8168 class HMonitorOperation final : public HExpression<1> {
8169 public:
8170 enum class OperationKind {
8171 kEnter,
8172 kExit,
8173 kLast = kExit
8174 };
8175
8176 HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc)
8177 : HExpression(kMonitorOperation,
8178 SideEffects::AllExceptGCDependency(), // Assume write/read on all fields/arrays.
8179 dex_pc) {
8180 SetPackedField<OperationKindField>(kind);
8181 SetRawInputAt(0, object);
8182 }
8183
8184 // Instruction may go into runtime, so we need an environment.
8185 bool NeedsEnvironment() const override { return true; }
8186
8187 bool CanThrow() const override {
8188 // Verifier guarantees that monitor-exit cannot throw.
8189 // This is important because it allows the HGraphBuilder to remove
8190 // a dead throw-catch loop generated for `synchronized` blocks/methods.
8191 return IsEnter();
8192 }
8193
8194 OperationKind GetOperationKind() const { return GetPackedField<OperationKindField>(); }
8195 bool IsEnter() const { return GetOperationKind() == OperationKind::kEnter; }
8196
8197 DECLARE_INSTRUCTION(MonitorOperation);
8198
8199 protected:
8200 DEFAULT_COPY_CONSTRUCTOR(MonitorOperation);
8201
8202 private:
8203 static constexpr size_t kFieldOperationKind = HInstruction::kNumberOfGenericPackedBits;
8204 static constexpr size_t kFieldOperationKindSize =
8205 MinimumBitsToStore(static_cast<size_t>(OperationKind::kLast));
8206 static constexpr size_t kNumberOfMonitorOperationPackedBits =
8207 kFieldOperationKind + kFieldOperationKindSize;
8208 static_assert(kNumberOfMonitorOperationPackedBits <= HInstruction::kMaxNumberOfPackedBits,
8209 "Too many packed fields.");
8210 using OperationKindField = BitField<OperationKind, kFieldOperationKind, kFieldOperationKindSize>;
8211 };
8212
8213 class HSelect final : public HExpression<3> {
8214 public:
8215 HSelect(HInstruction* condition,
8216 HInstruction* true_value,
8217 HInstruction* false_value,
8218 uint32_t dex_pc)
8219 : HExpression(kSelect, HPhi::ToPhiType(true_value->GetType()), SideEffects::None(), dex_pc) {
8220 DCHECK_EQ(HPhi::ToPhiType(true_value->GetType()), HPhi::ToPhiType(false_value->GetType()));
8221
8222 // First input must be `true_value` or `false_value` to allow codegens to
8223 // use the SameAsFirstInput allocation policy. We make it `false_value`, so
8224 // that architectures which implement HSelect as a conditional move also
8225 // will not need to invert the condition.
8226 SetRawInputAt(0, false_value);
8227 SetRawInputAt(1, true_value);
8228 SetRawInputAt(2, condition);
8229 }
8230
8231 bool IsClonable() const override { return true; }
8232 HInstruction* GetFalseValue() const { return InputAt(0); }
8233 HInstruction* GetTrueValue() const { return InputAt(1); }
8234 HInstruction* GetCondition() const { return InputAt(2); }
8235
8236 bool CanBeMoved() const override { return true; }
8237 bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
8238 return true;
8239 }
8240
8241 bool CanBeNull() const override {
8242 return GetTrueValue()->CanBeNull() || GetFalseValue()->CanBeNull();
8243 }
8244
8245 void UpdateType() {
8246 DCHECK_EQ(HPhi::ToPhiType(GetTrueValue()->GetType()),
8247 HPhi::ToPhiType(GetFalseValue()->GetType()));
8248 SetPackedField<TypeField>(HPhi::ToPhiType(GetTrueValue()->GetType()));
8249 }
8250
8251 DECLARE_INSTRUCTION(Select);
8252
8253 protected:
8254 DEFAULT_COPY_CONSTRUCTOR(Select);
8255 };
8256
8257 class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> {
8258 public:
8259 MoveOperands(Location source,
8260 Location destination,
8261 DataType::Type type,
8262 HInstruction* instruction)
8263 : source_(source), destination_(destination), type_(type), instruction_(instruction) {}
8264
8265 Location GetSource() const { return source_; }
8266 Location GetDestination() const { return destination_; }
8267
8268 void SetSource(Location value) { source_ = value; }
8269 void SetDestination(Location value) { destination_ = value; }
8270
8271 // The parallel move resolver marks moves as "in-progress" by clearing the
8272 // destination (but not the source).
8273 Location MarkPending() {
8274 DCHECK(!IsPending());
8275 Location dest = destination_;
8276 destination_ = Location::NoLocation();
8277 return dest;
8278 }
8279
8280 void ClearPending(Location dest) {
8281 DCHECK(IsPending());
8282 destination_ = dest;
8283 }
8284
8285 bool IsPending() const {
8286 DCHECK(source_.IsValid() || destination_.IsInvalid());
8287 return destination_.IsInvalid() && source_.IsValid();
8288 }
8289
8290 // True if this blocks a move from the given location.
8291 bool Blocks(Location loc) const {
8292 return !IsEliminated() && source_.OverlapsWith(loc);
8293 }
8294
8295 // A move is redundant if it's been eliminated, if its source and
8296 // destination are the same, or if its destination is unneeded.
8297 bool IsRedundant() const {
8298 return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_);
8299 }
8300
8301 // We clear both operands to indicate a move that has been eliminated.
8302 void Eliminate() {
8303 source_ = destination_ = Location::NoLocation();
8304 }
8305
8306 bool IsEliminated() const {
8307 DCHECK_IMPLIES(source_.IsInvalid(), destination_.IsInvalid());
8308 return source_.IsInvalid();
8309 }
8310
8311 DataType::Type GetType() const { return type_; }
8312
8313 bool Is64BitMove() const {
8314 return DataType::Is64BitType(type_);
8315 }
8316
8317 HInstruction* GetInstruction() const { return instruction_; }
8318
8319 private:
8320 Location source_;
8321 Location destination_;
8322 // The type this move is for.
8323 DataType::Type type_;
8324 // The instruction this move is associated with. Null when this move is
8325 // for moving an input into the expected location of its user (including a phi user).
8326 // This is only used in debug mode, to ensure we do not connect interval siblings
8327 // in the same parallel move.
8328 HInstruction* instruction_;
8329 };
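// A sketch of the "in-progress" protocol described above (illustrative only):
//
//   Location saved = move->MarkPending();  // destination_ becomes NoLocation(), IsPending() holds
//   ...                                    // resolve the moves this one was waiting on
//   move->ClearPending(saved);             // restore the original destination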
8330
8331 std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs);
8332
8333 static constexpr size_t kDefaultNumberOfMoves = 4;
8334
8335 class HParallelMove final : public HExpression<0> {
8336 public:
8337 explicit HParallelMove(ArenaAllocator* allocator, uint32_t dex_pc = kNoDexPc)
8338 : HExpression(kParallelMove, SideEffects::None(), dex_pc),
8339 moves_(allocator->Adapter(kArenaAllocMoveOperands)) {
8340 moves_.reserve(kDefaultNumberOfMoves);
8341 }
8342
8343 void AddMove(Location source,
8344 Location destination,
8345 DataType::Type type,
8346 HInstruction* instruction) {
8347 DCHECK(source.IsValid());
8348 DCHECK(destination.IsValid());
8349 if (kIsDebugBuild) {
8350 if (instruction != nullptr) {
8351 for (const MoveOperands& move : moves_) {
8352 if (move.GetInstruction() == instruction) {
8353 // Special case the situation where the move is for the spill slot
8354 // of the instruction.
8355 if ((GetPrevious() == instruction)
8356 || ((GetPrevious() == nullptr)
8357 && instruction->IsPhi()
8358 && instruction->GetBlock() == GetBlock())) {
8359 DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind())
8360 << "Doing parallel moves for the same instruction.";
8361 } else {
8362 DCHECK(false) << "Doing parallel moves for the same instruction.";
8363 }
8364 }
8365 }
8366 }
8367 for (const MoveOperands& move : moves_) {
8368 DCHECK(!destination.OverlapsWith(move.GetDestination()))
8369 << "Overlapped destination for two moves in a parallel move: "
8370 << move.GetSource() << " ==> " << move.GetDestination() << " and "
8371 << source << " ==> " << destination << " for " << SafePrint(instruction);
8372 }
8373 }
8374 moves_.emplace_back(source, destination, type, instruction);
8375 }
8376
8377 MoveOperands* MoveOperandsAt(size_t index) {
8378 return &moves_[index];
8379 }
8380
8381 size_t NumMoves() const { return moves_.size(); }
8382
8383 DECLARE_INSTRUCTION(ParallelMove);
8384
8385 protected:
8386 DEFAULT_COPY_CONSTRUCTOR(ParallelMove);
8387
8388 private:
8389 ArenaVector<MoveOperands> moves_;
8390 };
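// A minimal usage sketch for HParallelMove (illustrative only; `allocator`, `src_loc` and
// `dst_loc` are assumed to exist already):
//
//   HParallelMove* moves = new (allocator) HParallelMove(allocator);
//   moves->AddMove(src_loc, dst_loc, DataType::Type::kInt32, /* instruction= */ nullptr);
//
// A resolver can then walk the recorded moves via NumMoves() and MoveOperandsAt(i).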
8391
8392 class HBitwiseNegatedRight final : public HBinaryOperation {
8393 public:
8394 HBitwiseNegatedRight(DataType::Type result_type,
8395 InstructionKind op,
8396 HInstruction* left,
8397 HInstruction* right,
8398 uint32_t dex_pc = kNoDexPc)
8399 : HBinaryOperation(
8400 kBitwiseNegatedRight, result_type, left, right, SideEffects::None(), dex_pc),
8401 op_kind_(op) {
8402 DCHECK(op == HInstruction::kAnd || op == HInstruction::kOr || op == HInstruction::kXor) << op;
8403 }
8404
8405 template <typename T, typename U>
8406 auto Compute(T x, U y) const -> decltype(x & ~y) {
8407 static_assert(std::is_same<decltype(x & ~y), decltype(x | ~y)>::value &&
8408 std::is_same<decltype(x & ~y), decltype(x ^ ~y)>::value,
8409 "Inconsistent negated bitwise types");
8410 switch (op_kind_) {
8411 case HInstruction::kAnd:
8412 return x & ~y;
8413 case HInstruction::kOr:
8414 return x | ~y;
8415 case HInstruction::kXor:
8416 return x ^ ~y;
8417 default:
8418 LOG(FATAL) << "Unreachable";
8419 UNREACHABLE();
8420 }
8421 }
8422
8423 bool InstructionDataEquals(const HInstruction* other) const override {
8424 return op_kind_ == other->AsBitwiseNegatedRight()->op_kind_;
8425 }
8426
8427 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const override {
8428 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue(), y->GetValue()),
8429 GetDexPc());
8430 }
8431
8432 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const override {
8433 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue(), y->GetValue()),
8434 GetDexPc());
8435 }
8436
8437 InstructionKind GetOpKind() const { return op_kind_; }
8438
8439 DECLARE_INSTRUCTION(BitwiseNegatedRight);
8440
8441 protected:
8442 DEFAULT_COPY_CONSTRUCTOR(BitwiseNegatedRight);
8443
8444 private:
8445 // Specifies the bitwise operation, which will be then negated.
8446 const InstructionKind op_kind_;
8447 };
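// A small worked example for Compute() above (illustrative only): with op_kind_ == kAnd,
// Compute(0b1100, 0b1010) evaluates 0b1100 & ~0b1010 == 0b0100, i.e. the "and-not"
// (bic-style) operation; kOr and kXor negate the right operand in the same way.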
8448
8449 // This instruction computes an intermediate address pointing in the 'middle' of an object. The
8450 // result pointer cannot be handled by GC, so extra care is taken to make sure that this value is
8451 // never used across anything that can trigger GC.
8452 // The result of this instruction is not a pointer in the sense of `DataType::Type::kReference`.
8453 // So we represent it by the type `DataType::Type::kInt32`.
8454 class HIntermediateAddress final : public HExpression<2> {
8455 public:
8456 HIntermediateAddress(HInstruction* base_address, HInstruction* offset, uint32_t dex_pc)
8457 : HExpression(kIntermediateAddress,
8458 DataType::Type::kInt32,
8459 SideEffects::DependsOnGC(),
8460 dex_pc) {
8461 DCHECK_EQ(DataType::Size(DataType::Type::kInt32),
8462 DataType::Size(DataType::Type::kReference))
8463 << "kPrimInt and kPrimNot have different sizes.";
8464 SetRawInputAt(0, base_address);
8465 SetRawInputAt(1, offset);
8466 }
8467
8468 bool IsClonable() const override { return true; }
8469 bool CanBeMoved() const override { return true; }
8470 bool InstructionDataEquals([[maybe_unused]] const HInstruction* other) const override {
8471 return true;
8472 }
8473 bool IsActualObject() const override { return false; }
8474
8475 HInstruction* GetBaseAddress() const { return InputAt(0); }
8476 HInstruction* GetOffset() const { return InputAt(1); }
8477
8478 DECLARE_INSTRUCTION(IntermediateAddress);
8479
8480 protected:
8481 DEFAULT_COPY_CONSTRUCTOR(IntermediateAddress);
8482 };
8483
8484
8485 } // namespace art
8486
8487 #include "nodes_vector.h"
8488
8489 #if defined(ART_ENABLE_CODEGEN_arm) || defined(ART_ENABLE_CODEGEN_arm64)
8490 #include "nodes_shared.h"
8491 #endif
8492 #if defined(ART_ENABLE_CODEGEN_x86) || defined(ART_ENABLE_CODEGEN_x86_64)
8493 #include "nodes_x86.h"
8494 #endif
8495 #if defined(ART_ENABLE_CODEGEN_riscv64)
8496 #include "nodes_riscv64.h"
8497 #endif
8498
8499 namespace art HIDDEN {
8500
8501 class OptimizingCompilerStats;
8502
8503 class HGraphVisitor : public ValueObject {
8504 public:
8505 explicit HGraphVisitor(HGraph* graph, OptimizingCompilerStats* stats = nullptr)
8506 : stats_(stats),
8507 graph_(graph) {}
8508 virtual ~HGraphVisitor() {}
8509
8510 virtual void VisitInstruction([[maybe_unused]] HInstruction* instruction) {}
8511 virtual void VisitBasicBlock(HBasicBlock* block);
8512
8513 // Visit the graph following basic block insertion order.
8514 void VisitInsertionOrder();
8515
8516 // Visit the graph following dominator tree reverse post-order.
8517 void VisitReversePostOrder();
8518
8519 HGraph* GetGraph() const { return graph_; }
8520
8521 // Visit functions for instruction classes.
8522 #define DECLARE_VISIT_INSTRUCTION(name, super) \
8523 virtual void Visit##name(H##name* instr) { VisitInstruction(instr); }
8524
8525 FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)
8526
8527 #undef DECLARE_VISIT_INSTRUCTION
8528
8529 protected:
8530 void VisitPhis(HBasicBlock* block);
8531 void VisitNonPhiInstructions(HBasicBlock* block);
8532
8533 OptimizingCompilerStats* stats_;
8534
8535 private:
8536 HGraph* const graph_;
8537
8538 DISALLOW_COPY_AND_ASSIGN(HGraphVisitor);
8539 };
8540
8541 class HGraphDelegateVisitor : public HGraphVisitor {
8542 public:
8543 explicit HGraphDelegateVisitor(HGraph* graph, OptimizingCompilerStats* stats = nullptr)
8544 : HGraphVisitor(graph, stats) {}
8545 virtual ~HGraphDelegateVisitor() {}
8546
8547 // Visit functions that delegate to the super class.
8548 #define DECLARE_VISIT_INSTRUCTION(name, super) \
8549 void Visit##name(H##name* instr) override { Visit##super(instr); }
8550
8551 FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)
8552
8553 #undef DECLARE_VISIT_INSTRUCTION
8554
8555 private:
8556 DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor);
8557 };
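// A sketch of a visitor built on HGraphDelegateVisitor (illustrative only): because each
// generated Visit##name delegates to the visit method of its super class, overriding the
// visit for an abstract instruction catches all of its concrete subclasses.
//
//   class MyVisitor final : public HGraphDelegateVisitor {
//    public:
//     explicit MyVisitor(HGraph* graph) : HGraphDelegateVisitor(graph) {}
//     void VisitBinaryOperation(HBinaryOperation* op) override { /* every binary op */ }
//   };
//
//   MyVisitor(graph).VisitReversePostOrder();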
8558
8559 // Create a clone of the instruction, insert it into the graph; replace the old
8560 // instruction with the clone and remove the old one.
8561 HInstruction* ReplaceInstrOrPhiByClone(HInstruction* instr);
8562
8563 // Create a clone for each clonable instruction/phi and replace the original with its clone.
8564 //
8565 // Used for testing individual instruction cloner.
8566 class CloneAndReplaceInstructionVisitor final : public HGraphDelegateVisitor {
8567 public:
8568 explicit CloneAndReplaceInstructionVisitor(HGraph* graph)
8569 : HGraphDelegateVisitor(graph), instr_replaced_by_clones_count_(0) {}
8570
8571 void VisitInstruction(HInstruction* instruction) override {
8572 if (instruction->IsClonable()) {
8573 ReplaceInstrOrPhiByClone(instruction);
8574 instr_replaced_by_clones_count_++;
8575 }
8576 }
8577
8578 size_t GetInstrReplacedByClonesCount() const { return instr_replaced_by_clones_count_; }
8579
8580 private:
8581 size_t instr_replaced_by_clones_count_;
8582
8583 DISALLOW_COPY_AND_ASSIGN(CloneAndReplaceInstructionVisitor);
8584 };
8585
8586 // Iterator over the blocks that are part of the loop; includes blocks which are part
8587 // of an inner loop. The order in which the blocks are iterated follows their
8588 // block ids.
8589 class HBlocksInLoopIterator : public ValueObject {
8590 public:
8591 explicit HBlocksInLoopIterator(const HLoopInformation& info)
8592 : blocks_in_loop_(info.GetBlocks()),
8593 blocks_(info.GetHeader()->GetGraph()->GetBlocks()),
8594 index_(0) {
8595 if (!blocks_in_loop_.IsBitSet(index_)) {
8596 Advance();
8597 }
8598 }
8599
8600 bool Done() const { return index_ == blocks_.size(); }
8601 HBasicBlock* Current() const { return blocks_[index_]; }
8602 void Advance() {
8603 ++index_;
8604 for (size_t e = blocks_.size(); index_ < e; ++index_) {
8605 if (blocks_in_loop_.IsBitSet(index_)) {
8606 break;
8607 }
8608 }
8609 }
8610
8611 private:
8612 const BitVector& blocks_in_loop_;
8613 const ArenaVector<HBasicBlock*>& blocks_;
8614 size_t index_;
8615
8616 DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator);
8617 };
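// A typical iteration sketch (illustrative only; `loop_info` is assumed to be an
// HLoopInformation reference):
//
//   for (HBlocksInLoopIterator it(loop_info); !it.Done(); it.Advance()) {
//     HBasicBlock* block = it.Current();
//     ...
//   }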
8618
8619 // Iterator over the blocks that are part of the loop; includes blocks which are part
8620 // of an inner loop. The order in which the blocks are iterated is reverse
8621 // post order.
8622 class HBlocksInLoopReversePostOrderIterator : public ValueObject {
8623 public:
8624 explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info)
8625 : blocks_in_loop_(info.GetBlocks()),
8626 blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()),
8627 index_(0) {
8628 if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
8629 Advance();
8630 }
8631 }
8632
8633 bool Done() const { return index_ == blocks_.size(); }
8634 HBasicBlock* Current() const { return blocks_[index_]; }
8635 void Advance() {
8636 ++index_;
8637 for (size_t e = blocks_.size(); index_ < e; ++index_) {
8638 if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
8639 break;
8640 }
8641 }
8642 }
8643
8644 private:
8645 const BitVector& blocks_in_loop_;
8646 const ArenaVector<HBasicBlock*>& blocks_;
8647 size_t index_;
8648
8649 DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator);
8650 };
8651
8652 // Iterator over the blocks that are part of the loop; includes blocks which are part
8653 // of an inner loop. The order in which the blocks are iterated is post order.
8654 class HBlocksInLoopPostOrderIterator : public ValueObject {
8655 public:
8656 explicit HBlocksInLoopPostOrderIterator(const HLoopInformation& info)
8657 : blocks_in_loop_(info.GetBlocks()),
8658 blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()),
8659 index_(blocks_.size() - 1) {
8660 if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
8661 Advance();
8662 }
8663 }
8664
8665 bool Done() const { return index_ < 0; }
8666 HBasicBlock* Current() const { return blocks_[index_]; }
8667 void Advance() {
8668 --index_;
8669 for (; index_ >= 0; --index_) {
8670 if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
8671 break;
8672 }
8673 }
8674 }
8675
8676 private:
8677 const BitVector& blocks_in_loop_;
8678 const ArenaVector<HBasicBlock*>& blocks_;
8679
8680 int32_t index_;
8681
8682 DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopPostOrderIterator);
8683 };
8684
8685 // Returns int64_t value of a properly typed constant.
8686 inline int64_t Int64FromConstant(HConstant* constant) {
8687 if (constant->IsIntConstant()) {
8688 return constant->AsIntConstant()->GetValue();
8689 } else if (constant->IsLongConstant()) {
8690 return constant->AsLongConstant()->GetValue();
8691 } else {
8692 DCHECK(constant->IsNullConstant()) << constant->DebugName();
8693 return 0;
8694 }
8695 }
8696
8697 // Returns true iff instruction is an integral constant (and sets value on success).
8698 inline bool IsInt64AndGet(HInstruction* instruction, /*out*/ int64_t* value) {
8699 if (instruction->IsIntConstant()) {
8700 *value = instruction->AsIntConstant()->GetValue();
8701 return true;
8702 } else if (instruction->IsLongConstant()) {
8703 *value = instruction->AsLongConstant()->GetValue();
8704 return true;
8705 } else if (instruction->IsNullConstant()) {
8706 *value = 0;
8707 return true;
8708 }
8709 return false;
8710 }
8711
8712 // Returns true iff instruction is the given integral constant.
8713 inline bool IsInt64Value(HInstruction* instruction, int64_t value) {
8714 int64_t val = 0;
8715 return IsInt64AndGet(instruction, &val) && val == value;
8716 }
8717
8718 // Returns true iff instruction is a zero bit pattern.
8719 inline bool IsZeroBitPattern(HInstruction* instruction) {
8720 return instruction->IsConstant() && instruction->AsConstant()->IsZeroBitPattern();
8721 }
8722
8723 // Implement HInstruction::Is##type() for concrete instructions.
8724 #define INSTRUCTION_TYPE_CHECK(type, super) \
8725 inline bool HInstruction::Is##type() const { return GetKind() == k##type; }
8726 FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
8727 #undef INSTRUCTION_TYPE_CHECK
8728
8729 // Implement HInstruction::Is##type() for abstract instructions.
8730 #define INSTRUCTION_TYPE_CHECK_RESULT(type, super) \
8731 std::is_base_of<BaseType, H##type>::value,
8732 #define INSTRUCTION_TYPE_CHECK(type, super) \
8733 inline bool HInstruction::Is##type() const { \
8734 DCHECK_LT(GetKind(), kLastInstructionKind); \
8735 using BaseType = H##type; \
8736 static constexpr bool results[] = { \
8737 FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK_RESULT) \
8738 }; \
8739 return results[static_cast<size_t>(GetKind())]; \
8740 }
8741
8742 FOR_EACH_ABSTRACT_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
8743 #undef INSTRUCTION_TYPE_CHECK
8744 #undef INSTRUCTION_TYPE_CHECK_RESULT
8745
8746 #define INSTRUCTION_TYPE_CAST(type, super) \
8747 inline const H##type* HInstruction::As##type() const { \
8748 DCHECK(Is##type()); \
8749 return down_cast<const H##type*>(this); \
8750 } \
8751 inline H##type* HInstruction::As##type() { \
8752 DCHECK(Is##type()); \
8753 return down_cast<H##type*>(this); \
8754 } \
8755 inline const H##type* HInstruction::As##type##OrNull() const { \
8756 return Is##type() ? down_cast<const H##type*>(this) : nullptr; \
8757 } \
8758 inline H##type* HInstruction::As##type##OrNull() { \
8759 return Is##type() ? down_cast<H##type*>(this) : nullptr; \
8760 }
8761
8762 FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CAST)
8763 #undef INSTRUCTION_TYPE_CAST
8764
8765
8766 // Create space in `blocks` for adding `number_of_new_blocks` entries
8767 // starting after position `after`. Blocks following `after` are moved accordingly.
8768 inline void MakeRoomFor(ArenaVector<HBasicBlock*>* blocks,
8769 size_t number_of_new_blocks,
8770 size_t after) {
8771 DCHECK_LT(after, blocks->size());
8772 size_t old_size = blocks->size();
8773 size_t new_size = old_size + number_of_new_blocks;
8774 blocks->resize(new_size);
8775 std::copy_backward(blocks->begin() + after + 1u, blocks->begin() + old_size, blocks->end());
8776 }
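// A worked example (illustrative): with `*blocks` holding [B0, B1, B2, B3],
// MakeRoomFor(&blocks, /* number_of_new_blocks= */ 2, /* after= */ 1) resizes the vector
// to six entries and shifts B2 and B3 to the end, yielding [B0, B1, B2, B3, B2, B3];
// the stale copies at indices 2 and 3 are the slots the caller then overwrites.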
8777
8778 /*
8779 * Hunt "under the hood" of array lengths (leading to array references),
8780 * null checks (also leading to array references), and new arrays
8781 * (leading to the actual length). This makes it more likely related
8782 * instructions become actually comparable.
8783 */
8784 inline HInstruction* HuntForDeclaration(HInstruction* instruction) {
8785 while (instruction->IsArrayLength() ||
8786 instruction->IsNullCheck() ||
8787 instruction->IsNewArray()) {
8788 instruction = instruction->IsNewArray()
8789 ? instruction->AsNewArray()->GetLength()
8790 : instruction->InputAt(0);
8791 }
8792 return instruction;
8793 }
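// For example (a sketch): for `a.length` built as HArrayLength(HNullCheck(a)), hunting
// returns `a`; for the length of a freshly allocated array, HArrayLength(HNewArray(len, ...)),
// hunting returns `len`, so related length/index instructions become directly comparable,
// as noted above.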
8794
8795 inline bool IsAddOrSub(const HInstruction* instruction) {
8796 return instruction->IsAdd() || instruction->IsSub();
8797 }
8798
8799 void RemoveEnvironmentUses(HInstruction* instruction);
8800 bool HasEnvironmentUsedByOthers(HInstruction* instruction);
8801 void ResetEnvironmentInputRecords(HInstruction* instruction);
8802
8803 // Detects an instruction that is >= 0. As long as the value is carried by
8804 // a single instruction, arithmetic wrap-around cannot occur.
8805 bool IsGEZero(HInstruction* instruction);
8806
8807 } // namespace art
8808
8809 #endif // ART_COMPILER_OPTIMIZING_NODES_H_
8810