1 /*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16 #include "nodes.h"
17
18 #include <algorithm>
19 #include <cfloat>
20 #include <functional>
21 #include <optional>
22
23 #include "art_method-inl.h"
24 #include "base/arena_allocator.h"
25 #include "base/arena_bit_vector.h"
26 #include "base/bit_utils.h"
27 #include "base/bit_vector-inl.h"
28 #include "base/bit_vector.h"
29 #include "base/iteration_range.h"
30 #include "base/logging.h"
31 #include "base/malloc_arena_pool.h"
32 #include "base/scoped_arena_allocator.h"
33 #include "base/scoped_arena_containers.h"
34 #include "base/stl_util.h"
35 #include "class_linker-inl.h"
36 #include "class_root-inl.h"
37 #include "code_generator.h"
38 #include "common_dominator.h"
39 #include "intrinsic_objects.h"
40 #include "intrinsics.h"
41 #include "intrinsics_list.h"
42 #include "mirror/class-inl.h"
43 #include "scoped_thread_state_change-inl.h"
44 #include "ssa_builder.h"
45
46 namespace art HIDDEN {
47
48 // Enable floating-point static evaluation during constant folding
49 // only if all floating-point operations and constants evaluate in the
50 // range and precision of the type used (i.e., 32-bit float, 64-bit
51 // double).
52 static constexpr bool kEnableFloatingPointStaticEvaluation = (FLT_EVAL_METHOD == 0);
53
CreateRootHandle(VariableSizedHandleScope * handles,ClassRoot class_root)54 ReferenceTypeInfo::TypeHandle HandleCache::CreateRootHandle(VariableSizedHandleScope* handles,
55 ClassRoot class_root) {
56 // Mutator lock is required for NewHandle and GetClassRoot().
57 ScopedObjectAccess soa(Thread::Current());
58 return handles->NewHandle(GetClassRoot(class_root));
59 }
60
AddBlock(HBasicBlock * block)61 void HGraph::AddBlock(HBasicBlock* block) {
62 block->SetBlockId(blocks_.size());
63 blocks_.push_back(block);
64 }
65
FindBackEdges(ArenaBitVector * visited)66 void HGraph::FindBackEdges(ArenaBitVector* visited) {
67 // "visited" must be empty on entry, it's an output argument for all visited (i.e. live) blocks.
68 DCHECK_EQ(visited->GetHighestBitSet(), -1);
69
70 // Allocate memory from local ScopedArenaAllocator.
71 ScopedArenaAllocator allocator(GetArenaStack());
72 // Nodes that we're currently visiting, indexed by block id.
73 ArenaBitVector visiting(
74 &allocator, blocks_.size(), /* expandable= */ false, kArenaAllocGraphBuilder);
75 // Number of successors visited from a given node, indexed by block id.
76 ScopedArenaVector<size_t> successors_visited(blocks_.size(),
77 0u,
78 allocator.Adapter(kArenaAllocGraphBuilder));
79 // Stack of nodes that we're currently visiting (same as marked in "visiting" above).
80 ScopedArenaVector<HBasicBlock*> worklist(allocator.Adapter(kArenaAllocGraphBuilder));
81 constexpr size_t kDefaultWorklistSize = 8;
82 worklist.reserve(kDefaultWorklistSize);
// Seed the iterative DFS with the entry block.
83 visited->SetBit(entry_block_->GetBlockId());
84 visiting.SetBit(entry_block_->GetBlockId());
85 worklist.push_back(entry_block_);
86
87 while (!worklist.empty()) {
88 HBasicBlock* current = worklist.back();
89 uint32_t current_id = current->GetBlockId();
90 if (successors_visited[current_id] == current->GetSuccessors().size()) {
// All successors explored: retire `current` from the DFS stack.
91 visiting.ClearBit(current_id);
92 worklist.pop_back();
93 } else {
94 HBasicBlock* successor = current->GetSuccessors()[successors_visited[current_id]++];
95 uint32_t successor_id = successor->GetBlockId();
96 if (visiting.IsBitSet(successor_id)) {
// An edge to a block still on the DFS stack is, by definition, a back edge.
97 DCHECK(ContainsElement(worklist, successor));
98 successor->AddBackEdge(current);
99 } else if (!visited->IsBitSet(successor_id)) {
100 visited->SetBit(successor_id);
101 visiting.SetBit(successor_id);
102 worklist.push_back(successor);
103 }
104 }
105 }
106 }
107
108 // Remove the environment use records of the instruction for users.
RemoveEnvironmentUses(HInstruction * instruction)109 void RemoveEnvironmentUses(HInstruction* instruction) {
110 for (HEnvironment* environment = instruction->GetEnvironment();
111 environment != nullptr;
112 environment = environment->GetParent()) {
113 for (size_t i = 0, e = environment->Size(); i < e; ++i) {
114 if (environment->GetInstructionAt(i) != nullptr) {
115 environment->RemoveAsUserOfInput(i);
116 }
117 }
118 }
119 }
120
121 // Return whether the instruction has an environment and it's used by others.
HasEnvironmentUsedByOthers(HInstruction * instruction)122 bool HasEnvironmentUsedByOthers(HInstruction* instruction) {
123 for (HEnvironment* environment = instruction->GetEnvironment();
124 environment != nullptr;
125 environment = environment->GetParent()) {
126 for (size_t i = 0, e = environment->Size(); i < e; ++i) {
127 HInstruction* user = environment->GetInstructionAt(i);
128 if (user != nullptr) {
129 return true;
130 }
131 }
132 }
133 return false;
134 }
135
136 // Reset environment records of the instruction itself.
ResetEnvironmentInputRecords(HInstruction * instruction)137 void ResetEnvironmentInputRecords(HInstruction* instruction) {
138 for (HEnvironment* environment = instruction->GetEnvironment();
139 environment != nullptr;
140 environment = environment->GetParent()) {
141 for (size_t i = 0, e = environment->Size(); i < e; ++i) {
142 DCHECK(environment->GetHolder() == instruction);
143 if (environment->GetInstructionAt(i) != nullptr) {
144 environment->SetRawEnvAt(i, nullptr);
145 }
146 }
147 }
148 }
149
RemoveAsUser(HInstruction * instruction)150 static void RemoveAsUser(HInstruction* instruction) {
// Unregister `instruction` from the use lists of its regular inputs and from
// the inputs referenced by its (possibly chained) environments.
151 instruction->RemoveAsUserOfAllInputs();
152 RemoveEnvironmentUses(instruction);
153 }
154
RemoveDeadBlocksInstructionsAsUsersAndDisconnect(const ArenaBitVector & visited) const155 void HGraph::RemoveDeadBlocksInstructionsAsUsersAndDisconnect(const ArenaBitVector& visited) const {
156 for (size_t i = 0; i < blocks_.size(); ++i) {
157 if (!visited.IsBitSet(i)) {
158 HBasicBlock* block = blocks_[i];
159 if (block == nullptr) continue;
160
161 // Remove as user.
162 for (HInstructionIterator it(block->GetPhis()); !it.Done(); it.Advance()) {
163 RemoveAsUser(it.Current());
164 }
165 for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
166 RemoveAsUser(it.Current());
167 }
168
169 // Remove non-catch phi uses, and disconnect the block.
170 block->DisconnectFromSuccessors(&visited);
171 }
172 }
173 }
174
175 // This method assumes `insn` has been removed from all users with the exception of catch
176 // phis because of missing exceptional edges in the graph. It removes the
177 // instruction from catch phi uses, together with inputs of other catch phis in
178 // the catch block at the same index, as these must be dead too.
RemoveCatchPhiUsesOfDeadInstruction(HInstruction * insn)179 static void RemoveCatchPhiUsesOfDeadInstruction(HInstruction* insn) {
180 DCHECK(!insn->HasEnvironmentUses());
181 while (insn->HasNonEnvironmentUses()) {
182 const HUseListNode<HInstruction*>& use = insn->GetUses().front();
183 size_t use_index = use.GetIndex();
184 HBasicBlock* user_block = use.GetUser()->GetBlock();
185 DCHECK(use.GetUser()->IsPhi());
186 DCHECK(user_block->IsCatchBlock());
187 for (HInstructionIterator phi_it(user_block->GetPhis()); !phi_it.Done(); phi_it.Advance()) {
188 phi_it.Current()->AsPhi()->RemoveInputAt(use_index);
189 }
190 }
191 }
192
RemoveDeadBlocks(const ArenaBitVector & visited)193 void HGraph::RemoveDeadBlocks(const ArenaBitVector& visited) {
194 DCHECK(reverse_post_order_.empty()) << "We shouldn't have dominance information.";
195 for (size_t i = 0; i < blocks_.size(); ++i) {
196 if (!visited.IsBitSet(i)) {
197 HBasicBlock* block = blocks_[i];
198 if (block == nullptr) continue;
199
200 // Remove all remaining uses (which should be only catch phi uses), and the instructions.
201 block->RemoveCatchPhiUsesAndInstruction(/* building_dominator_tree = */ true);
202
203 // Remove the block from the list of blocks, so that further analyses
204 // never see it.
205 blocks_[i] = nullptr;
206 if (block->IsExitBlock()) {
207 SetExitBlock(nullptr);
208 }
209 // Mark the block as removed. This is used by the HGraphBuilder to discard
210 // the block as a branch target.
211 block->SetGraph(nullptr);
212 }
213 }
214 }
215
BuildDominatorTree()216 GraphAnalysisResult HGraph::BuildDominatorTree() {
217 // Allocate memory from local ScopedArenaAllocator.
218 ScopedArenaAllocator allocator(GetArenaStack());
219
220 ArenaBitVector visited(&allocator, blocks_.size(), false, kArenaAllocGraphBuilder);
221
222 // (1) Find the back edges in the graph doing a DFS traversal.
223 FindBackEdges(&visited);
224
225 // (2) Remove instructions and phis from blocks not visited during
226 // the initial DFS as users from other instructions, so that
227 // users can be safely removed before uses later.
228 // Also disconnect the block from its successors, updating the successor's phis if needed.
229 RemoveDeadBlocksInstructionsAsUsersAndDisconnect(visited);
230
231 // (3) Remove blocks not visited during the initial DFS.
232 // Step (5) requires dead blocks to be removed from the
233 // predecessors list of live blocks.
234 RemoveDeadBlocks(visited);
235
236 // (4) Simplify the CFG now, so that we don't need to recompute
237 // dominators and the reverse post order.
238 SimplifyCFG();
239
240 // (5) Compute the dominance information and the reverse post order.
241 ComputeDominanceInformation();
242
243 // (6) Analyze loops discovered through back edge analysis, and
244 // set the loop information on each block.
245 GraphAnalysisResult result = AnalyzeLoops();
246 if (result != kAnalysisSuccess) {
247 return result;
248 }
249
250 // (7) Precompute per-block try membership before entering the SSA builder,
251 // which needs the information to build catch block phis from values of
252 // locals at throwing instructions inside try blocks.
253 ComputeTryBlockInformation();
254
255 return kAnalysisSuccess;
256 }
257
RecomputeDominatorTree()258 GraphAnalysisResult HGraph::RecomputeDominatorTree() {
259 DCHECK(!HasIrreducibleLoops()) << "Recomputing loop information in graphs with irreducible loops "
260 << "is unsupported, as it could lead to loop header changes";
261 ClearLoopInformation();
262 ClearDominanceInformation();
263 return BuildDominatorTree();
264 }
265
ClearDominanceInformation()266 void HGraph::ClearDominanceInformation() {
267 for (HBasicBlock* block : GetActiveBlocks()) {
268 block->ClearDominanceInformation();
269 }
270 reverse_post_order_.clear();
271 }
272
ClearLoopInformation()273 void HGraph::ClearLoopInformation() {
274 SetHasLoops(false);
275 SetHasIrreducibleLoops(false);
276 for (HBasicBlock* block : GetActiveBlocks()) {
277 block->SetLoopInformation(nullptr);
278 }
279 }
280
ClearDominanceInformation()281 void HBasicBlock::ClearDominanceInformation() {
282 dominated_blocks_.clear();
283 dominator_ = nullptr;
284 }
285
GetFirstInstructionDisregardMoves() const286 HInstruction* HBasicBlock::GetFirstInstructionDisregardMoves() const {
287 HInstruction* instruction = GetFirstInstruction();
288 while (instruction->IsParallelMove()) {
289 instruction = instruction->GetNext();
290 }
291 return instruction;
292 }
293
UpdateDominatorOfSuccessor(HBasicBlock * block,HBasicBlock * successor)294 static bool UpdateDominatorOfSuccessor(HBasicBlock* block, HBasicBlock* successor) {
295 DCHECK(ContainsElement(block->GetSuccessors(), successor));
296
297 HBasicBlock* old_dominator = successor->GetDominator();
298 HBasicBlock* new_dominator =
299 (old_dominator == nullptr) ? block
300 : CommonDominator::ForPair(old_dominator, block);
301
302 if (old_dominator == new_dominator) {
303 return false;
304 } else {
305 successor->SetDominator(new_dominator);
306 return true;
307 }
308 }
309
ComputeDominanceInformation()310 void HGraph::ComputeDominanceInformation() {
311 DCHECK(reverse_post_order_.empty());
312 reverse_post_order_.reserve(blocks_.size());
313 reverse_post_order_.push_back(entry_block_);
314
315 // Allocate memory from local ScopedArenaAllocator.
316 ScopedArenaAllocator allocator(GetArenaStack());
317 // Number of visits of a given node, indexed by block id.
318 ScopedArenaVector<size_t> visits(blocks_.size(), 0u, allocator.Adapter(kArenaAllocGraphBuilder))
319 // Number of successors visited from a given node, indexed by block id.
320 ScopedArenaVector<size_t> successors_visited(blocks_.size(),
321 0u,
322 allocator.Adapter(kArenaAllocGraphBuilder));
323 // Nodes for which we need to visit successors.
324 ScopedArenaVector<HBasicBlock*> worklist(allocator.Adapter(kArenaAllocGraphBuilder));
325 constexpr size_t kDefaultWorklistSize = 8;
326 worklist.reserve(kDefaultWorklistSize);
327 worklist.push_back(entry_block_);
328
// Iterative traversal: a block is appended to the RPO (and its successors
// explored) only once all of its forward predecessors have been processed.
329 while (!worklist.empty()) {
330 HBasicBlock* current = worklist.back();
331 uint32_t current_id = current->GetBlockId();
332 if (successors_visited[current_id] == current->GetSuccessors().size()) {
333 worklist.pop_back();
334 } else {
335 HBasicBlock* successor = current->GetSuccessors()[successors_visited[current_id]++];
336 UpdateDominatorOfSuccessor(current, successor);
337
338 // Once all the forward edges have been visited, we know the immediate
339 // dominator of the block. We can then start visiting its successors.
340 if (++visits[successor->GetBlockId()] ==
341 successor->GetPredecessors().size() - successor->NumberOfBackEdges()) {
342 reverse_post_order_.push_back(successor);
343 worklist.push_back(successor);
344 }
345 }
346 }
347
348 // Check if the graph has back edges not dominated by their respective headers.
349 // If so, we need to update the dominators of those headers and recursively of
350 // their successors. We do that with a fix-point iteration over all blocks.
351 // The algorithm is guaranteed to terminate because it loops only if the sum
352 // of all dominator chains has decreased in the current iteration.
353 bool must_run_fix_point = false;
354 for (HBasicBlock* block : blocks_) {
355 if (block != nullptr &&
356 block->IsLoopHeader() &&
357 block->GetLoopInformation()->HasBackEdgeNotDominatedByHeader()) {
358 must_run_fix_point = true;
359 break;
360 }
361 }
362 if (must_run_fix_point) {
363 bool update_occurred = true;
364 while (update_occurred) {
365 update_occurred = false;
366 for (HBasicBlock* block : GetReversePostOrder()) {
367 for (HBasicBlock* successor : block->GetSuccessors()) {
368 update_occurred |= UpdateDominatorOfSuccessor(block, successor);
369 }
370 }
371 }
372 }
373
374 // Make sure that there are no remaining blocks whose dominator information
375 // needs to be updated.
376 if (kIsDebugBuild) {
377 for (HBasicBlock* block : GetReversePostOrder()) {
378 for (HBasicBlock* successor : block->GetSuccessors()) {
379 DCHECK(!UpdateDominatorOfSuccessor(block, successor));
380 }
381 }
382 }
383
384 // Populate `dominated_blocks_` information after computing all dominators.
385 // The potential presence of irreducible loops requires to do it after.
386 for (HBasicBlock* block : GetReversePostOrder()) {
387 if (!block->IsEntryBlock()) {
388 block->GetDominator()->AddDominatedBlock(block);
389 }
390 }
391 }
392
SplitEdge(HBasicBlock * block,HBasicBlock * successor)393 HBasicBlock* HGraph::SplitEdge(HBasicBlock* block, HBasicBlock* successor) {
394 HBasicBlock* new_block = new (allocator_) HBasicBlock(this, successor->GetDexPc());
395 AddBlock(new_block);
396 // Use `InsertBetween` to ensure the predecessor index and successor index of
397 // `block` and `successor` are preserved.
398 new_block->InsertBetween(block, successor);
399 return new_block;
400 }
401
SplitCriticalEdge(HBasicBlock * block,HBasicBlock * successor)402 void HGraph::SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor) {
403 // Insert a new node between `block` and `successor` to split the
404 // critical edge.
405 HBasicBlock* new_block = SplitEdge(block, successor);
406 new_block->AddInstruction(new (allocator_) HGoto(successor->GetDexPc()));
407 if (successor->IsLoopHeader()) {
408 // If we split at a back edge boundary, make the new block the back edge.
409 HLoopInformation* info = successor->GetLoopInformation();
410 if (info->IsBackEdge(*block)) {
411 info->RemoveBackEdge(block);
412 info->AddBackEdge(new_block);
413 }
414 }
415 }
416
SplitEdgeAndUpdateRPO(HBasicBlock * block,HBasicBlock * successor)417 HBasicBlock* HGraph::SplitEdgeAndUpdateRPO(HBasicBlock* block, HBasicBlock* successor) {
418 HBasicBlock* new_block = SplitEdge(block, successor);
419 // In the RPO we have {... , block, ... , successor}. We want to insert `new_block` right after
420 // `block` to have a consistent RPO without recomputing the whole graph's RPO.
421 reverse_post_order_.insert(
422 reverse_post_order_.begin() + IndexOfElement(reverse_post_order_, block) + 1, new_block);
423 return new_block;
424 }
425
426 // Reorder phi inputs to match reordering of the block's predecessors.
FixPhisAfterPredecessorsReodering(HBasicBlock * block,size_t first,size_t second)427 static void FixPhisAfterPredecessorsReodering(HBasicBlock* block, size_t first, size_t second) {
428 for (HInstructionIterator it(block->GetPhis()); !it.Done(); it.Advance()) {
429 HPhi* phi = it.Current()->AsPhi();
430 HInstruction* first_instr = phi->InputAt(first);
431 HInstruction* second_instr = phi->InputAt(second);
432 phi->ReplaceInput(first_instr, second);
433 phi->ReplaceInput(second_instr, first);
434 }
435 }
436
437 // Make sure that the first predecessor of a loop header is the incoming block.
OrderLoopHeaderPredecessors(HBasicBlock * header)438 void HGraph::OrderLoopHeaderPredecessors(HBasicBlock* header) {
439 DCHECK(header->IsLoopHeader());
440 HLoopInformation* info = header->GetLoopInformation();
441 if (info->IsBackEdge(*header->GetPredecessors()[0])) {
442 HBasicBlock* to_swap = header->GetPredecessors()[0];
443 for (size_t pred = 1, e = header->GetPredecessors().size(); pred < e; ++pred) {
444 HBasicBlock* predecessor = header->GetPredecessors()[pred];
445 if (!info->IsBackEdge(*predecessor)) {
446 header->predecessors_[pred] = to_swap;
447 header->predecessors_[0] = predecessor;
448 FixPhisAfterPredecessorsReodering(header, 0, pred);
449 break;
450 }
451 }
452 }
453 }
454
455 // Transform control flow of the loop to a single preheader format (don't touch the data flow).
456 // New_preheader can be already among the header predecessors - this situation will be correctly
457 // processed.
FixControlForNewSinglePreheader(HBasicBlock * header,HBasicBlock * new_preheader)458 static void FixControlForNewSinglePreheader(HBasicBlock* header, HBasicBlock* new_preheader) {
459 HLoopInformation* loop_info = header->GetLoopInformation();
460 for (size_t pred = 0; pred < header->GetPredecessors().size(); ++pred) {
461 HBasicBlock* predecessor = header->GetPredecessors()[pred];
462 if (!loop_info->IsBackEdge(*predecessor) && predecessor != new_preheader) {
463 predecessor->ReplaceSuccessor(header, new_preheader);
464 pred--;
465 }
466 }
467 }
468
469 // == Before == == After ==
470 // _________ _________ _________ _________
471 // | B0 | | B1 | (old preheaders) | B0 | | B1 |
472 // |=========| |=========| |=========| |=========|
473 // | i0 = .. | | i1 = .. | | i0 = .. | | i1 = .. |
474 // |_________| |_________| |_________| |_________|
475 // \ / \ /
476 // \ / ___v____________v___
477 // \ / (new preheader) | B20 <- B0, B1 |
478 // | | |====================|
479 // | | | i20 = phi(i0, i1) |
480 // | | |____________________|
481 // | | |
482 // /\ | | /\ /\ | /\
483 // / v_______v_________v_______v \ / v___________v_____________v \
484 // | | B10 <- B0, B1, B2, B3 | | | | B10 <- B20, B2, B3 | |
485 // | |===========================| | (header) | |===========================| |
486 // | | i10 = phi(i0, i1, i2, i3) | | | | i10 = phi(i20, i2, i3) | |
487 // | |___________________________| | | |___________________________| |
488 // | / \ | | / \ |
489 // | ... ... | | ... ... |
490 // | _________ _________ | | _________ _________ |
491 // | | B2 | | B3 | | | | B2 | | B3 | |
492 // | |=========| |=========| | (back edges) | |=========| |=========| |
493 // | | i2 = .. | | i3 = .. | | | | i2 = .. | | i3 = .. | |
494 // | |_________| |_________| | | |_________| |_________| |
495 // \ / \ / \ / \ /
496 // \___/ \___/ \___/ \___/
497 //
TransformLoopToSinglePreheaderFormat(HBasicBlock * header)498 void HGraph::TransformLoopToSinglePreheaderFormat(HBasicBlock* header) {
499 HLoopInformation* loop_info = header->GetLoopInformation();
500
// Create the new preheader block, ending with a goto into the header.
501 HBasicBlock* preheader = new (allocator_) HBasicBlock(this, header->GetDexPc());
502 AddBlock(preheader);
503 preheader->AddInstruction(new (allocator_) HGoto(header->GetDexPc()));
504
505 // If the old header has no Phis then we only need to fix the control flow.
506 if (header->GetPhis().IsEmpty()) {
507 FixControlForNewSinglePreheader(header, preheader);
508 preheader->AddSuccessor(header);
509 return;
510 }
511
512 // Find the first non-back edge block in the header's predecessors list.
513 size_t first_nonbackedge_pred_pos = 0;
514 bool found = false;
515 for (size_t pred = 0; pred < header->GetPredecessors().size(); ++pred) {
516 HBasicBlock* predecessor = header->GetPredecessors()[pred];
517 if (!loop_info->IsBackEdge(*predecessor)) {
518 first_nonbackedge_pred_pos = pred;
519 found = true;
520 break;
521 }
522 }
523
524 DCHECK(found);
525
526 // Fix the data-flow.
// For each header phi, create a matching phi in the preheader that merges
// all non-back-edge inputs; the header phi keeps only the preheader phi and
// the back-edge inputs (see the ASCII diagram above).
527 for (HInstructionIterator it(header->GetPhis()); !it.Done(); it.Advance()) {
528 HPhi* header_phi = it.Current()->AsPhi();
529
530 HPhi* preheader_phi = new (GetAllocator()) HPhi(GetAllocator(),
531 header_phi->GetRegNumber(),
532 0,
533 header_phi->GetType());
534 if (header_phi->GetType() == DataType::Type::kReference) {
535 preheader_phi->SetReferenceTypeInfoIfValid(header_phi->GetReferenceTypeInfo());
536 }
537 preheader->AddPhi(preheader_phi);
538
539 HInstruction* orig_input = header_phi->InputAt(first_nonbackedge_pred_pos);
540 header_phi->ReplaceInput(preheader_phi, first_nonbackedge_pred_pos);
541 preheader_phi->AddInput(orig_input);
542
// Move the remaining non-back-edge inputs from the header phi into the
// preheader phi; note that `input_pos` is adjusted after each removal.
543 for (size_t input_pos = first_nonbackedge_pred_pos + 1;
544 input_pos < header_phi->InputCount();
545 input_pos++) {
546 HInstruction* input = header_phi->InputAt(input_pos);
547 HBasicBlock* pred_block = header->GetPredecessors()[input_pos];
548
549 if (loop_info->Contains(*pred_block)) {
550 DCHECK(loop_info->IsBackEdge(*pred_block));
551 } else {
552 preheader_phi->AddInput(input);
553 header_phi->RemoveInputAt(input_pos);
554 input_pos--;
555 }
556 }
557 }
558
559 // Fix the control-flow.
560 HBasicBlock* first_pred = header->GetPredecessors()[first_nonbackedge_pred_pos];
561 preheader->InsertBetween(first_pred, header);
562
563 FixControlForNewSinglePreheader(header, preheader);
564 }
565
SimplifyLoop(HBasicBlock * header)566 void HGraph::SimplifyLoop(HBasicBlock* header) {
567 HLoopInformation* info = header->GetLoopInformation();
568
569 // Make sure the loop has only one pre header. This simplifies SSA building by having
570 // to just look at the pre header to know which locals are initialized at entry of the
571 // loop. Also, don't allow the entry block to be a pre header: this simplifies inlining
572 // this graph.
573 size_t number_of_incomings = header->GetPredecessors().size() - info->NumberOfBackEdges();
574 if (number_of_incomings != 1 || (GetEntryBlock()->GetSingleSuccessor() == header)) {
575 TransformLoopToSinglePreheaderFormat(header);
576 }
577
578 OrderLoopHeaderPredecessors(header);
579
580 HInstruction* first_instruction = header->GetFirstInstruction();
581 if (first_instruction != nullptr && first_instruction->IsSuspendCheck()) {
582 // Called from DeadBlockElimination. Update SuspendCheck pointer.
583 info->SetSuspendCheck(first_instruction->AsSuspendCheck());
584 }
585 }
586
ComputeTryBlockInformation()587 void HGraph::ComputeTryBlockInformation() {
588 // Iterate in reverse post order to propagate try membership information from
589 // predecessors to their successors.
590 bool graph_has_try_catch = false;
591
592 for (HBasicBlock* block : GetReversePostOrder()) {
593 if (block->IsEntryBlock() || block->IsCatchBlock()) {
594 // Catch blocks after simplification have only exceptional predecessors
595 // and hence are never in tries.
596 continue;
597 }
598
599 // Infer try membership from the first predecessor. Having simplified loops,
600 // the first predecessor can never be a back edge and therefore it must have
601 // been visited already and had its try membership set.
602 HBasicBlock* first_predecessor = block->GetPredecessors()[0];
603 DCHECK_IMPLIES(block->IsLoopHeader(),
604 !block->GetLoopInformation()->IsBackEdge(*first_predecessor));
605 const HTryBoundary* try_entry = first_predecessor->ComputeTryEntryOfSuccessors();
606 graph_has_try_catch |= try_entry != nullptr;
607 if (try_entry != nullptr &&
608 (block->GetTryCatchInformation() == nullptr ||
609 try_entry != &block->GetTryCatchInformation()->GetTryEntry())) {
610 // We are either setting try block membership for the first time or it
611 // has changed.
612 block->SetTryCatchInformation(new (allocator_) TryCatchInformation(*try_entry));
613 }
614 }
615
616 SetHasTryCatch(graph_has_try_catch);
617 }
618
SimplifyCFG()619 void HGraph::SimplifyCFG() {
620 // Simplify the CFG for future analysis, and code generation:
621 // (1): Split critical edges.
622 // (2): Simplify loops by having only one preheader.
623 // NOTE: We're appending new blocks inside the loop, so we need to use index because iterators
624 // can be invalidated. We remember the initial size to avoid iterating over the new blocks.
625 for (size_t block_id = 0u, end = blocks_.size(); block_id != end; ++block_id) {
626 HBasicBlock* block = blocks_[block_id];
627 if (block == nullptr) continue;
628 if (block->GetSuccessors().size() > 1) {
629 // Only split normal-flow edges. We cannot split exceptional edges as they
630 // are synthesized (approximate real control flow), and we do not need to
631 // anyway. Moves that would be inserted there are performed by the runtime.
632 ArrayRef<HBasicBlock* const> normal_successors = block->GetNormalSuccessors();
633 for (size_t j = 0, e = normal_successors.size(); j < e; ++j) {
634 HBasicBlock* successor = normal_successors[j];
635 DCHECK(!successor->IsCatchBlock());
636 if (successor == exit_block_) {
637 // (Throw/Return/ReturnVoid)->TryBoundary->Exit. Special case which we
638 // do not want to split because Goto->Exit is not allowed.
639 DCHECK(block->IsSingleTryBoundary());
640 } else if (successor->GetPredecessors().size() > 1) {
// A successor with multiple predecessors reached over one of multiple
// successor edges makes this edge critical: split it.
641 SplitCriticalEdge(block, successor);
642 // SplitCriticalEdge could have invalidated the `normal_successors`
643 // ArrayRef. We must re-acquire it.
644 normal_successors = block->GetNormalSuccessors();
645 DCHECK_EQ(normal_successors[j]->GetSingleSuccessor(), successor);
646 DCHECK_EQ(e, normal_successors.size());
647 }
648 }
649 }
650 if (block->IsLoopHeader()) {
651 SimplifyLoop(block);
652 } else if (!block->IsEntryBlock() &&
653 block->GetFirstInstruction() != nullptr &&
654 block->GetFirstInstruction()->IsSuspendCheck()) {
655 // We are being called by the dead code elimiation pass, and what used to be
656 // a loop got dismantled. Just remove the suspend check.
657 block->RemoveInstruction(block->GetFirstInstruction());
658 }
659 }
660 }
661
AnalyzeLoops() const662 GraphAnalysisResult HGraph::AnalyzeLoops() const {
663 // We iterate post order to ensure we visit inner loops before outer loops.
664 // `PopulateRecursive` needs this guarantee to know whether a natural loop
665 // contains an irreducible loop.
666 for (HBasicBlock* block : GetPostOrder()) {
667 if (block->IsLoopHeader()) {
668 if (block->IsCatchBlock()) {
669 // TODO: Dealing with exceptional back edges could be tricky because
670 // they only approximate the real control flow. Bail out for now.
671 VLOG(compiler) << "Not compiled: Exceptional back edges";
672 return kAnalysisFailThrowCatchLoop;
673 }
674 block->GetLoopInformation()->Populate();
675 }
676 }
677 return kAnalysisSuccess;
678 }
679
Dump(std::ostream & os)680 void HLoopInformation::Dump(std::ostream& os) {
681 os << "header: " << header_->GetBlockId() << std::endl;
682 os << "pre header: " << GetPreHeader()->GetBlockId() << std::endl;
683 for (HBasicBlock* block : back_edges_) {
684 os << "back edge: " << block->GetBlockId() << std::endl;
685 }
686 for (HBasicBlock* block : header_->GetPredecessors()) {
687 os << "predecessor: " << block->GetBlockId() << std::endl;
688 }
689 for (uint32_t idx : blocks_.Indexes()) {
690 os << " in loop: " << idx << std::endl;
691 }
692 }
693
InsertConstant(HConstant * constant)694 void HGraph::InsertConstant(HConstant* constant) {
695 // New constants are inserted before the SuspendCheck at the bottom of the
696 // entry block. Note that this method can be called from the graph builder and
697 // the entry block therefore may not end with SuspendCheck->Goto yet.
698 HInstruction* insert_before = nullptr;
699
700 HInstruction* gota = entry_block_->GetLastInstruction();
701 if (gota != nullptr && gota->IsGoto()) {
702 HInstruction* suspend_check = gota->GetPrevious();
703 if (suspend_check != nullptr && suspend_check->IsSuspendCheck()) {
704 insert_before = suspend_check;
705 } else {
706 insert_before = gota;
707 }
708 }
709
710 if (insert_before == nullptr) {
711 entry_block_->AddInstruction(constant);
712 } else {
713 entry_block_->InsertInstructionBefore(constant, insert_before);
714 }
715 }
716
GetNullConstant(uint32_t dex_pc)717 HNullConstant* HGraph::GetNullConstant(uint32_t dex_pc) {
718 // For simplicity, don't bother reviving the cached null constant if it is
719 // not null and not in a block. Otherwise, we need to clear the instruction
720 // id and/or any invariants the graph is assuming when adding new instructions.
721 if ((cached_null_constant_ == nullptr) || (cached_null_constant_->GetBlock() == nullptr)) {
722 cached_null_constant_ = new (allocator_) HNullConstant(dex_pc);
723 cached_null_constant_->SetReferenceTypeInfo(GetInexactObjectRti());
724 InsertConstant(cached_null_constant_);
725 }
726 if (kIsDebugBuild) {
727 ScopedObjectAccess soa(Thread::Current());
728 DCHECK(cached_null_constant_->GetReferenceTypeInfo().IsValid());
729 }
730 return cached_null_constant_;
731 }
732
GetCurrentMethod()733 HCurrentMethod* HGraph::GetCurrentMethod() {
734 // For simplicity, don't bother reviving the cached current method if it is
735 // not null and not in a block. Otherwise, we need to clear the instruction
736 // id and/or any invariants the graph is assuming when adding new instructions.
737 if ((cached_current_method_ == nullptr) || (cached_current_method_->GetBlock() == nullptr)) {
738 cached_current_method_ = new (allocator_) HCurrentMethod(
739 Is64BitInstructionSet(instruction_set_) ? DataType::Type::kInt64 : DataType::Type::kInt32,
740 entry_block_->GetDexPc());
741 if (entry_block_->GetFirstInstruction() == nullptr) {
742 entry_block_->AddInstruction(cached_current_method_);
743 } else {
744 entry_block_->InsertInstructionBefore(
745 cached_current_method_, entry_block_->GetFirstInstruction());
746 }
747 }
748 return cached_current_method_;
749 }
750
GetMethodName() const751 const char* HGraph::GetMethodName() const {
752 const dex::MethodId& method_id = dex_file_.GetMethodId(method_idx_);
753 return dex_file_.GetMethodName(method_id);
754 }
755
PrettyMethod(bool with_signature) const756 std::string HGraph::PrettyMethod(bool with_signature) const {
// Human-readable name of the compiled method, optionally with its signature.
757 return dex_file_.PrettyMethod(method_idx_, with_signature);
758 }
759
GetConstant(DataType::Type type,int64_t value,uint32_t dex_pc)760 HConstant* HGraph::GetConstant(DataType::Type type, int64_t value, uint32_t dex_pc) {
761 switch (type) {
762 case DataType::Type::kBool:
763 DCHECK(IsUint<1>(value));
764 FALLTHROUGH_INTENDED;
765 case DataType::Type::kUint8:
766 case DataType::Type::kInt8:
767 case DataType::Type::kUint16:
768 case DataType::Type::kInt16:
769 case DataType::Type::kInt32:
770 DCHECK(IsInt(DataType::Size(type) * kBitsPerByte, value));
771 return GetIntConstant(static_cast<int32_t>(value), dex_pc);
772
773 case DataType::Type::kInt64:
774 return GetLongConstant(value, dex_pc);
775
776 default:
777 LOG(FATAL) << "Unsupported constant type";
778 UNREACHABLE();
779 }
780 }
781
CacheFloatConstant(HFloatConstant * constant)782 void HGraph::CacheFloatConstant(HFloatConstant* constant) {
783 int32_t value = bit_cast<int32_t, float>(constant->GetValue());
784 DCHECK(cached_float_constants_.find(value) == cached_float_constants_.end());
785 cached_float_constants_.Overwrite(value, constant);
786 }
787
CacheDoubleConstant(HDoubleConstant * constant)788 void HGraph::CacheDoubleConstant(HDoubleConstant* constant) {
789 int64_t value = bit_cast<int64_t, double>(constant->GetValue());
790 DCHECK(cached_double_constants_.find(value) == cached_double_constants_.end());
791 cached_double_constants_.Overwrite(value, constant);
792 }
793
Add(HBasicBlock * block)794 void HLoopInformation::Add(HBasicBlock* block) {
795 blocks_.SetBit(block->GetBlockId());
796 }
797
Remove(HBasicBlock * block)798 void HLoopInformation::Remove(HBasicBlock* block) {
799 blocks_.ClearBit(block->GetBlockId());
800 }
801
PopulateRecursive(HBasicBlock * block)802 void HLoopInformation::PopulateRecursive(HBasicBlock* block) {
803 if (blocks_.IsBitSet(block->GetBlockId())) {
804 return;
805 }
806
807 blocks_.SetBit(block->GetBlockId());
808 block->SetInLoop(this);
809 if (block->IsLoopHeader()) {
810 // We're visiting loops in post-order, so inner loops must have been
811 // populated already.
812 DCHECK(block->GetLoopInformation()->IsPopulated());
813 if (block->GetLoopInformation()->IsIrreducible()) {
814 contains_irreducible_loop_ = true;
815 }
816 }
817 for (HBasicBlock* predecessor : block->GetPredecessors()) {
818 PopulateRecursive(predecessor);
819 }
820 }
821
// Backward walk used for irreducible loops: decides, for each block reachable
// backwards from a back edge, whether it belongs to this loop. `finalized`
// records blocks whose membership has been decided, which both avoids rework
// and guarantees termination on cyclic CFGs.
void HLoopInformation::PopulateIrreducibleRecursive(HBasicBlock* block, ArenaBitVector* finalized) {
  size_t block_id = block->GetBlockId();

  // If `block` is in `finalized`, we know its membership in the loop has been
  // decided and it does not need to be revisited.
  if (finalized->IsBitSet(block_id)) {
    return;
  }

  bool is_finalized = false;
  if (block->IsLoopHeader()) {
    // If we hit a loop header in an irreducible loop, we first check if the
    // pre header of that loop belongs to the currently analyzed loop. If it does,
    // then we visit the back edges.
    // Note that we cannot use GetPreHeader, as the loop may have not been populated
    // yet.
    HBasicBlock* pre_header = block->GetPredecessors()[0];
    PopulateIrreducibleRecursive(pre_header, finalized);
    if (blocks_.IsBitSet(pre_header->GetBlockId())) {
      // The inner loop's entry is inside this loop, so the whole inner loop is.
      block->SetInLoop(this);
      blocks_.SetBit(block_id);
      finalized->SetBit(block_id);
      is_finalized = true;

      HLoopInformation* info = block->GetLoopInformation();
      for (HBasicBlock* back_edge : info->GetBackEdges()) {
        PopulateIrreducibleRecursive(back_edge, finalized);
      }
    }
  } else {
    // Visit all predecessors. If one predecessor is part of the loop, this
    // block is also part of this loop.
    for (HBasicBlock* predecessor : block->GetPredecessors()) {
      PopulateIrreducibleRecursive(predecessor, finalized);
      if (!is_finalized && blocks_.IsBitSet(predecessor->GetBlockId())) {
        block->SetInLoop(this);
        blocks_.SetBit(block_id);
        finalized->SetBit(block_id);
        is_finalized = true;
      }
    }
  }

  // All predecessors have been recursively visited. Mark finalized if not marked yet.
  if (!is_finalized) {
    finalized->SetBit(block_id);
  }
}
870
// Computes the set of blocks belonging to this loop and flags irreducibility.
// Also updates the graph-level "has loops" / "has irreducible loops" bits.
void HLoopInformation::Populate() {
  DCHECK_EQ(blocks_.NumSetBits(), 0u) << "Loop information has already been populated";
  // Populate this loop: starting with the back edge, recursively add predecessors
  // that are not already part of that loop. Set the header as part of the loop
  // to end the recursion.
  // This is a recursive implementation of the algorithm described in
  // "Advanced Compiler Design & Implementation" (Muchnick) p192.
  HGraph* graph = header_->GetGraph();
  blocks_.SetBit(header_->GetBlockId());
  header_->SetInLoop(this);

  // A back edge not dominated by the header means the loop body can be entered
  // without passing through the header, i.e. the loop is irreducible.
  bool is_irreducible_loop = HasBackEdgeNotDominatedByHeader();

  if (is_irreducible_loop) {
    // Allocate memory from local ScopedArenaAllocator.
    ScopedArenaAllocator allocator(graph->GetArenaStack());
    ArenaBitVector visited(&allocator,
                           graph->GetBlocks().size(),
                           /* expandable= */ false,
                           kArenaAllocGraphBuilder);
    // Stop marking blocks at the loop header.
    visited.SetBit(header_->GetBlockId());

    for (HBasicBlock* back_edge : GetBackEdges()) {
      PopulateIrreducibleRecursive(back_edge, &visited);
    }
  } else {
    for (HBasicBlock* back_edge : GetBackEdges()) {
      PopulateRecursive(back_edge);
    }
  }

  if (!is_irreducible_loop && graph->IsCompilingOsr()) {
    // When compiling in OSR mode, all loops in the compiled method may be entered
    // from the interpreter. We treat this OSR entry point just like an extra entry
    // to an irreducible loop, so we need to mark the method's loops as irreducible.
    // This does not apply to inlined loops which do not act as OSR entry points.
    if (suspend_check_ == nullptr) {
      // Just building the graph in OSR mode, this loop is not inlined. We never build an
      // inner graph in OSR mode as we can do OSR transition only from the outer method.
      is_irreducible_loop = true;
    } else {
      // Look at the suspend check's environment to determine if the loop was inlined.
      DCHECK(suspend_check_->HasEnvironment());
      if (!suspend_check_->GetEnvironment()->IsFromInlinedInvoke()) {
        is_irreducible_loop = true;
      }
    }
  }
  if (is_irreducible_loop) {
    irreducible_ = true;
    contains_irreducible_loop_ = true;
    graph->SetHasIrreducibleLoops(true);
  }
  graph->SetHasLoops(true);
}
927
PopulateInnerLoopUpwards(HLoopInformation * inner_loop)928 void HLoopInformation::PopulateInnerLoopUpwards(HLoopInformation* inner_loop) {
929 DCHECK(inner_loop->GetPreHeader()->GetLoopInformation() == this);
930 blocks_.Union(&inner_loop->blocks_);
931 HLoopInformation* outer_loop = GetPreHeader()->GetLoopInformation();
932 if (outer_loop != nullptr) {
933 outer_loop->PopulateInnerLoopUpwards(this);
934 }
935 }
936
GetPreHeader() const937 HBasicBlock* HLoopInformation::GetPreHeader() const {
938 HBasicBlock* block = header_->GetPredecessors()[0];
939 DCHECK(irreducible_ || (block == header_->GetDominator()));
940 return block;
941 }
942
Contains(const HBasicBlock & block) const943 bool HLoopInformation::Contains(const HBasicBlock& block) const {
944 return blocks_.IsBitSet(block.GetBlockId());
945 }
946
IsIn(const HLoopInformation & other) const947 bool HLoopInformation::IsIn(const HLoopInformation& other) const {
948 return other.blocks_.IsBitSet(header_->GetBlockId());
949 }
950
IsDefinedOutOfTheLoop(HInstruction * instruction) const951 bool HLoopInformation::IsDefinedOutOfTheLoop(HInstruction* instruction) const {
952 return !blocks_.IsBitSet(instruction->GetBlock()->GetBlockId());
953 }
954
GetLifetimeEnd() const955 size_t HLoopInformation::GetLifetimeEnd() const {
956 size_t last_position = 0;
957 for (HBasicBlock* back_edge : GetBackEdges()) {
958 last_position = std::max(back_edge->GetLifetimeEnd(), last_position);
959 }
960 return last_position;
961 }
962
HasBackEdgeNotDominatedByHeader() const963 bool HLoopInformation::HasBackEdgeNotDominatedByHeader() const {
964 for (HBasicBlock* back_edge : GetBackEdges()) {
965 DCHECK(back_edge->GetDominator() != nullptr);
966 if (!header_->Dominates(back_edge)) {
967 return true;
968 }
969 }
970 return false;
971 }
972
DominatesAllBackEdges(HBasicBlock * block)973 bool HLoopInformation::DominatesAllBackEdges(HBasicBlock* block) {
974 for (HBasicBlock* back_edge : GetBackEdges()) {
975 if (!block->Dominates(back_edge)) {
976 return false;
977 }
978 }
979 return true;
980 }
981
982
HasExitEdge() const983 bool HLoopInformation::HasExitEdge() const {
984 // Determine if this loop has at least one exit edge.
985 HBlocksInLoopReversePostOrderIterator it_loop(*this);
986 for (; !it_loop.Done(); it_loop.Advance()) {
987 for (HBasicBlock* successor : it_loop.Current()->GetSuccessors()) {
988 if (!Contains(*successor)) {
989 return true;
990 }
991 }
992 }
993 return false;
994 }
995
Dominates(const HBasicBlock * other) const996 bool HBasicBlock::Dominates(const HBasicBlock* other) const {
997 // Walk up the dominator tree from `other`, to find out if `this`
998 // is an ancestor.
999 const HBasicBlock* current = other;
1000 while (current != nullptr) {
1001 if (current == this) {
1002 return true;
1003 }
1004 current = current->GetDominator();
1005 }
1006 return false;
1007 }
1008
UpdateInputsUsers(HInstruction * instruction)1009 static void UpdateInputsUsers(HInstruction* instruction) {
1010 HInputsRef inputs = instruction->GetInputs();
1011 for (size_t i = 0; i < inputs.size(); ++i) {
1012 inputs[i]->AddUseAt(instruction, i);
1013 }
1014 // Environment should be created later.
1015 DCHECK(!instruction->HasEnvironment());
1016 }
1017
// Swaps `initial` for `replacement` in this block: links the new phi in next
// to the old one, redirects all uses, then removes the old phi.
void HBasicBlock::ReplaceAndRemovePhiWith(HPhi* initial, HPhi* replacement) {
  DCHECK(initial->GetBlock() == this);
  InsertPhiAfter(replacement, initial);
  initial->ReplaceWith(replacement);
  RemovePhi(initial);
}
1024
// Replaces `initial` with `replacement` in place and removes `initial`.
// Control-flow instructions need the manual path below because
// InsertInstructionBefore() DCHECKs against inserting control flow.
void HBasicBlock::ReplaceAndRemoveInstructionWith(HInstruction* initial,
                                                  HInstruction* replacement) {
  DCHECK(initial->GetBlock() == this);
  if (initial->IsControlFlow()) {
    // We can only replace a control flow instruction with another control flow instruction.
    DCHECK(replacement->IsControlFlow());
    DCHECK_EQ(replacement->GetId(), -1);
    DCHECK_EQ(replacement->GetType(), DataType::Type::kVoid);
    DCHECK_EQ(initial->GetBlock(), this);
    DCHECK_EQ(initial->GetType(), DataType::Type::kVoid);
    DCHECK(initial->GetUses().empty());
    DCHECK(initial->GetEnvUses().empty());
    // Manually link the replacement in before `initial`, mirroring what
    // InsertInstructionBefore() does for non-control-flow instructions.
    replacement->SetBlock(this);
    replacement->SetId(GetGraph()->GetNextInstructionId());
    instructions_.InsertInstructionBefore(replacement, initial);
    UpdateInputsUsers(replacement);
  } else {
    InsertInstructionBefore(replacement, initial);
    initial->ReplaceWith(replacement);
  }
  RemoveInstruction(initial);
}
1047
// Attaches a fresh (blockless, id-less) `instruction` to `block`, assigns it
// a new id, registers it with its inputs' use lists, and appends it to
// `instruction_list` (either the block's phi list or instruction list).
static void Add(HInstructionList* instruction_list,
                HBasicBlock* block,
                HInstruction* instruction) {
  DCHECK(instruction->GetBlock() == nullptr);
  DCHECK_EQ(instruction->GetId(), -1);
  instruction->SetBlock(block);
  instruction->SetId(block->GetGraph()->GetNextInstructionId());
  UpdateInputsUsers(instruction);
  instruction_list->AddInstruction(instruction);
}
1058
// Appends `instruction` to this block's non-phi instruction list.
void HBasicBlock::AddInstruction(HInstruction* instruction) {
  Add(&instructions_, this, instruction);
}
1062
// Appends `phi` to this block's phi list.
void HBasicBlock::AddPhi(HPhi* phi) {
  Add(&phis_, this, phi);
}
1066
// Inserts a fresh non-phi, non-control-flow `instruction` immediately before
// `cursor`, assigning it a new id and registering it with its inputs.
void HBasicBlock::InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor) {
  DCHECK(!cursor->IsPhi());
  DCHECK(!instruction->IsPhi());
  // Id -1 means the instruction has not been added to any block yet.
  DCHECK_EQ(instruction->GetId(), -1);
  DCHECK_NE(cursor->GetId(), -1);
  DCHECK_EQ(cursor->GetBlock(), this);
  DCHECK(!instruction->IsControlFlow());
  instruction->SetBlock(this);
  instruction->SetId(GetGraph()->GetNextInstructionId());
  UpdateInputsUsers(instruction);
  instructions_.InsertInstructionBefore(instruction, cursor);
}
1079
// Inserts a fresh non-phi, non-control-flow `instruction` immediately after
// `cursor`. The cursor must not be control flow either, since nothing may
// follow a block's terminating instruction.
void HBasicBlock::InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor) {
  DCHECK(!cursor->IsPhi());
  DCHECK(!instruction->IsPhi());
  // Id -1 means the instruction has not been added to any block yet.
  DCHECK_EQ(instruction->GetId(), -1);
  DCHECK_NE(cursor->GetId(), -1);
  DCHECK_EQ(cursor->GetBlock(), this);
  DCHECK(!instruction->IsControlFlow());
  DCHECK(!cursor->IsControlFlow());
  instruction->SetBlock(this);
  instruction->SetId(GetGraph()->GetNextInstructionId());
  UpdateInputsUsers(instruction);
  instructions_.InsertInstructionAfter(instruction, cursor);
}
1093
// Inserts a fresh `phi` immediately after `cursor` in this block's phi list,
// assigning it a new id and registering it with its inputs.
void HBasicBlock::InsertPhiAfter(HPhi* phi, HPhi* cursor) {
  // Id -1 means the phi has not been added to any block yet.
  DCHECK_EQ(phi->GetId(), -1);
  DCHECK_NE(cursor->GetId(), -1);
  DCHECK_EQ(cursor->GetBlock(), this);
  phi->SetBlock(this);
  phi->SetId(GetGraph()->GetNextInstructionId());
  UpdateInputsUsers(phi);
  phis_.InsertInstructionAfter(phi, cursor);
}
1103
// Detaches `instruction` from `block` and unlinks it from `instruction_list`.
// With `ensure_safety`, additionally verifies the instruction has no remaining
// uses and unregisters it from the use lists of its inputs/environment.
static void Remove(HInstructionList* instruction_list,
                   HBasicBlock* block,
                   HInstruction* instruction,
                   bool ensure_safety) {
  DCHECK_EQ(block, instruction->GetBlock());
  instruction->SetBlock(nullptr);
  instruction_list->RemoveInstruction(instruction);
  if (ensure_safety) {
    DCHECK(instruction->GetUses().empty());
    DCHECK(instruction->GetEnvUses().empty());
    RemoveAsUser(instruction);
  }
}
1117
// Removes a non-phi `instruction` from this block.
void HBasicBlock::RemoveInstruction(HInstruction* instruction, bool ensure_safety) {
  DCHECK(!instruction->IsPhi());
  Remove(&instructions_, this, instruction, ensure_safety);
}
1122
// Removes `phi` from this block's phi list.
void HBasicBlock::RemovePhi(HPhi* phi, bool ensure_safety) {
  Remove(&phis_, this, phi, ensure_safety);
}
1126
RemoveInstructionOrPhi(HInstruction * instruction,bool ensure_safety)1127 void HBasicBlock::RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety) {
1128 if (instruction->IsPhi()) {
1129 RemovePhi(instruction->AsPhi(), ensure_safety);
1130 } else {
1131 RemoveInstruction(instruction, ensure_safety);
1132 }
1133 }
1134
CopyFrom(ArrayRef<HInstruction * const> locals)1135 void HEnvironment::CopyFrom(ArrayRef<HInstruction* const> locals) {
1136 for (size_t i = 0; i < locals.size(); i++) {
1137 HInstruction* instruction = locals[i];
1138 SetRawEnvAt(i, instruction);
1139 if (instruction != nullptr) {
1140 instruction->AddEnvUseAt(this, i);
1141 }
1142 }
1143 }
1144
CopyFrom(HEnvironment * env)1145 void HEnvironment::CopyFrom(HEnvironment* env) {
1146 for (size_t i = 0; i < env->Size(); i++) {
1147 HInstruction* instruction = env->GetInstructionAt(i);
1148 SetRawEnvAt(i, instruction);
1149 if (instruction != nullptr) {
1150 instruction->AddEnvUseAt(this, i);
1151 }
1152 }
1153 }
1154
// Copies `env` into this environment, but rewrites loop-header phis of
// `loop_header` to their first input: at the end of the loop pre-header,
// the phi's value is the value flowing in from outside the loop (input 0).
void HEnvironment::CopyFromWithLoopPhiAdjustment(HEnvironment* env,
                                                 HBasicBlock* loop_header) {
  DCHECK(loop_header->IsLoopHeader());
  for (size_t i = 0; i < env->Size(); i++) {
    HInstruction* instruction = env->GetInstructionAt(i);
    SetRawEnvAt(i, instruction);
    if (instruction == nullptr) {
      continue;
    }
    if (instruction->IsLoopHeaderPhi() && (instruction->GetBlock() == loop_header)) {
      // At the end of the loop pre-header, the corresponding value for instruction
      // is the first input of the phi.
      HInstruction* initial = instruction->AsPhi()->InputAt(0);
      SetRawEnvAt(i, initial);
      initial->AddEnvUseAt(this, i);
    } else {
      instruction->AddEnvUseAt(this, i);
    }
  }
}
1175
// Unregisters this environment from the env-use list of the instruction held
// in vreg `index`. The use list is forward-list style, so the stored record
// keeps an iterator to the node *before* the use node — exactly what
// erase_after() needs.
void HEnvironment::RemoveAsUserOfInput(size_t index) const {
  const HUserRecord<HEnvironment*>& env_use = vregs_[index];
  HInstruction* user = env_use.GetInstruction();
  auto before_env_use_node = env_use.GetBeforeUseNode();
  user->env_uses_.erase_after(before_env_use_node);
  // Records after the erased node now have a stale "before" iterator; fix them.
  user->FixUpUserRecordsAfterEnvUseRemoval(before_env_use_node);
}
1183
// Rewires vreg `index` from its current instruction to `replacement` by
// splicing the use node from the old instruction's env-use list onto the
// front of the replacement's list, then fixing up affected user records.
void HEnvironment::ReplaceInput(HInstruction* replacement, size_t index) {
  const HUserRecord<HEnvironment*>& env_use_record = vregs_[index];
  HInstruction* orig_instr = env_use_record.GetInstruction();

  // Replacing an input with itself is not supported here (callers must check).
  DCHECK(orig_instr != replacement);

  HUseList<HEnvironment*>::iterator before_use_node = env_use_record.GetBeforeUseNode();
  // Note: fixup_end remains valid across splice_after().
  // It marks the first node whose user record is already correct, so the
  // fix-up below stops there.
  auto fixup_end = replacement->env_uses_.empty() ? replacement->env_uses_.begin()
                                                  : ++replacement->env_uses_.begin();
  replacement->env_uses_.splice_after(replacement->env_uses_.before_begin(),
                                      env_use_record.GetInstruction()->env_uses_,
                                      before_use_node);
  replacement->FixUpUserRecordsAfterEnvUseInsertion(fixup_end);
  orig_instr->FixUpUserRecordsAfterEnvUseRemoval(before_use_node);
}
1200
// Prints this instruction to `os`. With `dump_args`, also prints the
// transitive tree of inputs, each instruction at most once, indented by its
// depth from this instruction. Works even for instructions detached from a
// graph (falls back to a local arena).
std::ostream& HInstruction::Dump(std::ostream& os, bool dump_args) {
  // Note: Handle the case where the instruction has been removed from
  // the graph to support debugging output for failed gtests.
  HGraph* graph = (GetBlock() != nullptr) ? GetBlock()->GetGraph() : nullptr;
  HGraphVisualizer::DumpInstruction(&os, graph, this);
  if (dump_args) {
    // Allocate memory from local ScopedArenaAllocator.
    std::optional<MallocArenaPool> local_arena_pool;
    std::optional<ArenaStack> local_arena_stack;
    if (UNLIKELY(graph == nullptr)) {
      // No graph to borrow an arena from: create a throwaway pool/stack pair.
      local_arena_pool.emplace();
      local_arena_stack.emplace(&local_arena_pool.value());
    }
    ScopedArenaAllocator allocator(
        graph != nullptr ? graph->GetArenaStack() : &local_arena_stack.value());
    // Instructions that we already visited. We print each instruction only once.
    ArenaBitVector visited(&allocator,
                           (graph != nullptr) ? graph->GetCurrentInstructionId() : 0u,
                           /* expandable= */ (graph == nullptr),
                           kArenaAllocMisc);
    visited.SetBit(GetId());
    // Keep a queue of instructions with their indentations.
    ScopedArenaDeque<std::pair<HInstruction*, size_t>> queue(allocator.Adapter(kArenaAllocMisc));
    // Push inputs in reverse at the queue front so they pop in source order
    // (depth-first traversal).
    auto add_args = [&queue](HInstruction* instruction, size_t indentation) {
      for (HInstruction* arg : ReverseRange(instruction->GetInputs())) {
        queue.emplace_front(arg, indentation);
      }
    };
    add_args(this, /*indentation=*/ 1u);
    while (!queue.empty()) {
      HInstruction* instruction;
      size_t indentation;
      std::tie(instruction, indentation) = queue.front();
      queue.pop_front();
      if (!visited.IsBitSet(instruction->GetId())) {
        visited.SetBit(instruction->GetId());
        os << '\n';
        for (size_t i = 0; i != indentation; ++i) {
          os << "  ";
        }
        HGraphVisualizer::DumpInstruction(&os, graph, instruction);
        add_args(instruction, indentation + 1u);
      }
    }
  }
  return os;
}
1248
GetNextDisregardingMoves() const1249 HInstruction* HInstruction::GetNextDisregardingMoves() const {
1250 HInstruction* next = GetNext();
1251 while (next != nullptr && next->IsParallelMove()) {
1252 next = next->GetNext();
1253 }
1254 return next;
1255 }
1256
GetPreviousDisregardingMoves() const1257 HInstruction* HInstruction::GetPreviousDisregardingMoves() const {
1258 HInstruction* previous = GetPrevious();
1259 while (previous != nullptr && previous->IsParallelMove()) {
1260 previous = previous->GetPrevious();
1261 }
1262 return previous;
1263 }
1264
AddInstruction(HInstruction * instruction)1265 void HInstructionList::AddInstruction(HInstruction* instruction) {
1266 if (first_instruction_ == nullptr) {
1267 DCHECK(last_instruction_ == nullptr);
1268 first_instruction_ = last_instruction_ = instruction;
1269 } else {
1270 DCHECK(last_instruction_ != nullptr);
1271 last_instruction_->next_ = instruction;
1272 instruction->previous_ = last_instruction_;
1273 last_instruction_ = instruction;
1274 }
1275 }
1276
// Links `instruction` into the doubly linked list immediately before
// `cursor`. `cursor` must already be a member of this list.
void HInstructionList::InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor) {
  DCHECK(Contains(cursor));
  if (cursor == first_instruction_) {
    // Inserting at the head: `instruction` becomes the new first element.
    cursor->previous_ = instruction;
    instruction->next_ = cursor;
    first_instruction_ = instruction;
  } else {
    // Splice between the cursor's old predecessor and the cursor.
    instruction->previous_ = cursor->previous_;
    instruction->next_ = cursor;
    cursor->previous_ = instruction;
    instruction->previous_->next_ = instruction;
  }
}
1290
// Links `instruction` into the doubly linked list immediately after
// `cursor`. `cursor` must already be a member of this list.
void HInstructionList::InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor) {
  DCHECK(Contains(cursor));
  if (cursor == last_instruction_) {
    // Inserting at the tail: `instruction` becomes the new last element.
    cursor->next_ = instruction;
    instruction->previous_ = cursor;
    last_instruction_ = instruction;
  } else {
    // Splice between the cursor and the cursor's old successor.
    instruction->next_ = cursor->next_;
    instruction->previous_ = cursor;
    cursor->next_ = instruction;
    instruction->next_->previous_ = instruction;
  }
}
1304
// Unlinks `instruction` from the doubly linked list, updating the head/tail
// pointers when it is the first/last element. The removed instruction's own
// previous_/next_ pointers are not cleared here.
void HInstructionList::RemoveInstruction(HInstruction* instruction) {
  if (instruction->previous_ != nullptr) {
    instruction->previous_->next_ = instruction->next_;
  }
  if (instruction->next_ != nullptr) {
    instruction->next_->previous_ = instruction->previous_;
  }
  if (instruction == first_instruction_) {
    first_instruction_ = instruction->next_;
  }
  if (instruction == last_instruction_) {
    last_instruction_ = instruction->previous_;
  }
}
1319
Contains(HInstruction * instruction) const1320 bool HInstructionList::Contains(HInstruction* instruction) const {
1321 for (HInstructionIterator it(*this); !it.Done(); it.Advance()) {
1322 if (it.Current() == instruction) {
1323 return true;
1324 }
1325 }
1326 return false;
1327 }
1328
FoundBefore(const HInstruction * instruction1,const HInstruction * instruction2) const1329 bool HInstructionList::FoundBefore(const HInstruction* instruction1,
1330 const HInstruction* instruction2) const {
1331 DCHECK_EQ(instruction1->GetBlock(), instruction2->GetBlock());
1332 for (HInstructionIterator it(*this); !it.Done(); it.Advance()) {
1333 if (it.Current() == instruction2) {
1334 return false;
1335 }
1336 if (it.Current() == instruction1) {
1337 return true;
1338 }
1339 }
1340 LOG(FATAL) << "Did not find an order between two instructions of the same block.";
1341 UNREACHABLE();
1342 }
1343
// Reflexive variant of StrictlyDominates: an instruction dominates itself.
bool HInstruction::Dominates(HInstruction* other_instruction) const {
  return other_instruction == this || StrictlyDominates(other_instruction);
}
1347
StrictlyDominates(HInstruction * other_instruction) const1348 bool HInstruction::StrictlyDominates(HInstruction* other_instruction) const {
1349 if (other_instruction == this) {
1350 // An instruction does not strictly dominate itself.
1351 return false;
1352 }
1353 HBasicBlock* block = GetBlock();
1354 HBasicBlock* other_block = other_instruction->GetBlock();
1355 if (block != other_block) {
1356 return GetBlock()->Dominates(other_instruction->GetBlock());
1357 } else {
1358 // If both instructions are in the same block, ensure this
1359 // instruction comes before `other_instruction`.
1360 if (IsPhi()) {
1361 if (!other_instruction->IsPhi()) {
1362 // Phis appear before non phi-instructions so this instruction
1363 // dominates `other_instruction`.
1364 return true;
1365 } else {
1366 // There is no order among phis.
1367 LOG(FATAL) << "There is no dominance between phis of a same block.";
1368 UNREACHABLE();
1369 }
1370 } else {
1371 // `this` is not a phi.
1372 if (other_instruction->IsPhi()) {
1373 // Phis appear before non phi-instructions so this instruction
1374 // does not dominate `other_instruction`.
1375 return false;
1376 } else {
1377 // Check whether this instruction comes before
1378 // `other_instruction` in the instruction list.
1379 return block->GetInstructions().FoundBefore(this, other_instruction);
1380 }
1381 }
1382 }
1383 }
1384
// Detaches this instruction's environment: first drop the env uses it
// registered on other instructions, then clear the pointer.
void HInstruction::RemoveEnvironment() {
  RemoveEnvironmentUses(this);
  environment_ = nullptr;
}
1389
// Redirects every use (regular and environment) of `this` to `other` by
// splicing this instruction's use lists onto the front of `other`'s lists,
// then fixing up the user records of the moved nodes. `this` ends up with
// empty use lists but is NOT removed from its block.
void HInstruction::ReplaceWith(HInstruction* other) {
  DCHECK(other != nullptr);
  // Note: fixup_end remains valid across splice_after().
  // It is the first node whose user record is already correct, so fix-up
  // stops there.
  auto fixup_end = other->uses_.empty() ? other->uses_.begin() : ++other->uses_.begin();
  other->uses_.splice_after(other->uses_.before_begin(), uses_);
  other->FixUpUserRecordsAfterUseInsertion(fixup_end);

  // Note: env_fixup_end remains valid across splice_after().
  auto env_fixup_end =
      other->env_uses_.empty() ? other->env_uses_.begin() : ++other->env_uses_.begin();
  other->env_uses_.splice_after(other->env_uses_.before_begin(), env_uses_);
  other->FixUpUserRecordsAfterEnvUseInsertion(env_fixup_end);

  DCHECK(uses_.empty());
  DCHECK(env_uses_.empty());
}
1406
// Replaces the uses of `this` that are dominated by `dominator` with
// `replacement`. For phi users (except catch phis, which have no regular
// control flow for their inputs), a use is also replaced when the value flows
// in over a predecessor edge dominated by `dominator`. `strictly_dominated`
// chooses between strict and reflexive instruction domination when the user
// sits in `dominator`'s own block.
void HInstruction::ReplaceUsesDominatedBy(HInstruction* dominator,
                                          HInstruction* replacement,
                                          bool strictly_dominated) {
  HBasicBlock* dominator_block = dominator->GetBlock();
  std::optional<ArenaBitVector> visited_blocks;

  // Lazily compute the dominated blocks to faster calculation of domination afterwards.
  auto maybe_generate_visited_blocks = [&visited_blocks, this, dominator_block]() {
    if (visited_blocks.has_value()) {
      return;
    }
    HGraph* graph = GetBlock()->GetGraph();
    visited_blocks.emplace(graph->GetAllocator(),
                           graph->GetBlocks().size(),
                           /* expandable= */ false,
                           kArenaAllocMisc);
    ScopedArenaAllocator allocator(graph->GetArenaStack());
    ScopedArenaQueue<const HBasicBlock*> worklist(allocator.Adapter(kArenaAllocMisc));
    worklist.push(dominator_block);

    // BFS over the dominator tree: collect every block dominated by
    // `dominator_block` (including itself).
    while (!worklist.empty()) {
      const HBasicBlock* current = worklist.front();
      worklist.pop();
      visited_blocks->SetBit(current->GetBlockId());
      for (HBasicBlock* dominated : current->GetDominatedBlocks()) {
        if (visited_blocks->IsBitSet(dominated->GetBlockId())) {
          continue;
        }
        worklist.push(dominated);
      }
    }
  };

  const HUseList<HInstruction*>& uses = GetUses();
  for (auto it = uses.begin(), end = uses.end(); it != end; /* ++it below */) {
    HInstruction* user = it->GetUser();
    HBasicBlock* block = user->GetBlock();
    size_t index = it->GetIndex();
    // Increment `it` now because `*it` may disappear thanks to user->ReplaceInput().
    ++it;
    bool dominated = false;
    if (dominator_block == block) {
      // Trickier case, call the other methods.
      dominated =
          strictly_dominated ? dominator->StrictlyDominates(user) : dominator->Dominates(user);
    } else {
      // Block domination.
      maybe_generate_visited_blocks();
      dominated = visited_blocks->IsBitSet(block->GetBlockId());
    }

    if (dominated) {
      user->ReplaceInput(replacement, index);
    } else if (user->IsPhi() && !user->AsPhi()->IsCatchPhi()) {
      // If the input flows from a block dominated by `dominator`, we can replace it.
      // We do not perform this for catch phis as we don't have control flow support
      // for their inputs.
      HBasicBlock* predecessor = block->GetPredecessors()[index];
      maybe_generate_visited_blocks();
      if (visited_blocks->IsBitSet(predecessor->GetBlockId())) {
        user->ReplaceInput(replacement, index);
      }
    }
  }
}
1472
// Replaces the environment uses of `this` whose holder instruction is
// strictly dominated by `dominator` with `replacement`.
void HInstruction::ReplaceEnvUsesDominatedBy(HInstruction* dominator, HInstruction* replacement) {
  const HUseList<HEnvironment*>& uses = GetEnvUses();
  for (auto it = uses.begin(), end = uses.end(); it != end; /* ++it below */) {
    HEnvironment* user = it->GetUser();
    size_t index = it->GetIndex();
    // Increment `it` now because `*it` may disappear thanks to user->ReplaceInput().
    ++it;
    if (dominator->StrictlyDominates(user->GetHolder())) {
      user->ReplaceInput(replacement, index);
    }
  }
}
1485
// Rewires input `index` of this instruction to `replacement`: the use node is
// spliced from the old input's use list onto the front of the replacement's
// list, and affected user records are fixed up. A no-op when the input is
// already `replacement`.
void HInstruction::ReplaceInput(HInstruction* replacement, size_t index) {
  HUserRecord<HInstruction*> input_use = InputRecordAt(index);
  if (input_use.GetInstruction() == replacement) {
    // Nothing to do.
    return;
  }
  HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
  // Note: fixup_end remains valid across splice_after().
  // It is the first node whose user record is already correct, so fix-up
  // stops there.
  auto fixup_end =
      replacement->uses_.empty() ? replacement->uses_.begin() : ++replacement->uses_.begin();
  replacement->uses_.splice_after(replacement->uses_.before_begin(),
                                  input_use.GetInstruction()->uses_,
                                  before_use_node);
  replacement->FixUpUserRecordsAfterUseInsertion(fixup_end);
  input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
}
1502
EnvironmentSize() const1503 size_t HInstruction::EnvironmentSize() const {
1504 return HasEnvironment() ? environment_->Size() : 0;
1505 }
1506
// Appends `input` as the last input and registers this instruction as a user
// of it at the new index. The input must already be attached to a block.
void HVariableInputSizeInstruction::AddInput(HInstruction* input) {
  DCHECK(input->GetBlock() != nullptr);
  inputs_.push_back(HUserRecord<HInstruction*>(input));
  input->AddUseAt(this, inputs_.size() - 1);
}
1512
// Inserts `input` at position `index`, registers the use, and re-numbers the
// use records of all inputs shifted right by the insertion.
void HVariableInputSizeInstruction::InsertInputAt(size_t index, HInstruction* input) {
  inputs_.insert(inputs_.begin() + index, HUserRecord<HInstruction*>(input));
  input->AddUseAt(this, index);
  // Update indexes in use nodes of inputs that have been pushed further back by the insert().
  for (size_t i = index + 1u, e = inputs_.size(); i < e; ++i) {
    DCHECK_EQ(inputs_[i].GetUseNode()->GetIndex(), i - 1u);
    inputs_[i].GetUseNode()->SetIndex(i);
  }
}
1522
// Removes the input at position `index`, unregistering the corresponding use
// first, then re-numbers the use records of all inputs shifted left.
void HVariableInputSizeInstruction::RemoveInputAt(size_t index) {
  RemoveAsUserOfInput(index);
  inputs_.erase(inputs_.begin() + index);
  // Update indexes in use nodes of inputs that have been pulled forward by the erase().
  for (size_t i = index, e = inputs_.size(); i < e; ++i) {
    DCHECK_EQ(inputs_[i].GetUseNode()->GetIndex(), i + 1u);
    inputs_[i].GetUseNode()->SetIndex(i);
  }
}
1532
RemoveAllInputs()1533 void HVariableInputSizeInstruction::RemoveAllInputs() {
1534 RemoveAsUserOfAllInputs();
1535 DCHECK(!HasNonEnvironmentUses());
1536
1537 inputs_.clear();
1538 DCHECK_EQ(0u, InputCount());
1539 }
1540
// Removes `instruction` from the inputs of every constructor fence that uses
// it, deleting any fence that ends up with zero inputs. Returns the number
// of fences deleted (for statistics). `instruction` itself stays in its block.
size_t HConstructorFence::RemoveConstructorFences(HInstruction* instruction) {
  DCHECK(instruction->GetBlock() != nullptr);
  // Removing constructor fences only makes sense for instructions with an object return type.
  DCHECK_EQ(DataType::Type::kReference, instruction->GetType());

  // Return how many instructions were removed for statistic purposes.
  size_t remove_count = 0;

  // Efficient implementation that simultaneously (in one pass):
  // * Scans the uses list for all constructor fences.
  // * Deletes that constructor fence from the uses list of `instruction`.
  // * Deletes `instruction` from the constructor fence's inputs.
  // * Deletes the constructor fence if it now has 0 inputs.

  const HUseList<HInstruction*>& uses = instruction->GetUses();
  // Warning: Although this is "const", we might mutate the list when calling RemoveInputAt.
  for (auto it = uses.begin(), end = uses.end(); it != end; ) {
    const HUseListNode<HInstruction*>& use_node = *it;
    HInstruction* const use_instruction = use_node.GetUser();

    // Advance the iterator immediately once we fetch the use_node.
    // Warning: If the input is removed, the current iterator becomes invalid.
    ++it;

    if (use_instruction->IsConstructorFence()) {
      HConstructorFence* ctor_fence = use_instruction->AsConstructorFence();
      size_t input_index = use_node.GetIndex();

      // Process the candidate instruction for removal
      // from the graph.

      // Constructor fence instructions are never
      // used by other instructions.
      //
      // If we wanted to make this more generic, it
      // could be a runtime if statement.
      DCHECK(!ctor_fence->HasUses());

      // A constructor fence's return type is "kPrimVoid"
      // and therefore it can't have any environment uses.
      DCHECK(!ctor_fence->HasEnvironmentUses());

      // Remove the inputs first, otherwise removing the instruction
      // will try to remove its uses while we are already removing uses
      // and this operation will fail.
      DCHECK_EQ(instruction, ctor_fence->InputAt(input_index));

      // Removing the input will also remove the `use_node`.
      // (Do not look at `use_node` after this, it will be a dangling reference).
      ctor_fence->RemoveInputAt(input_index);

      // Once all inputs are removed, the fence is considered dead and
      // is removed.
      if (ctor_fence->InputCount() == 0u) {
        ctor_fence->GetBlock()->RemoveInstruction(ctor_fence);
        ++remove_count;
      }
    }
  }

  if (kIsDebugBuild) {
    // Post-condition checks:
    // * None of the uses of `instruction` are a constructor fence.
    // * The `instruction` itself did not get removed from a block.
    for (const HUseListNode<HInstruction*>& use_node : instruction->GetUses()) {
      CHECK(!use_node.GetUser()->IsConstructorFence());
    }
    CHECK(instruction->GetBlock() != nullptr);
  }

  return remove_count;
}
1613
Merge(HConstructorFence * other)1614 void HConstructorFence::Merge(HConstructorFence* other) {
1615 // Do not delete yourself from the graph.
1616 DCHECK(this != other);
1617 // Don't try to merge with an instruction not associated with a block.
1618 DCHECK(other->GetBlock() != nullptr);
1619 // A constructor fence's return type is "kPrimVoid"
1620 // and therefore it cannot have any environment uses.
1621 DCHECK(!other->HasEnvironmentUses());
1622
1623 auto has_input = [](HInstruction* haystack, HInstruction* needle) {
1624 // Check if `haystack` has `needle` as any of its inputs.
1625 for (size_t input_count = 0; input_count < haystack->InputCount(); ++input_count) {
1626 if (haystack->InputAt(input_count) == needle) {
1627 return true;
1628 }
1629 }
1630 return false;
1631 };
1632
1633 // Add any inputs from `other` into `this` if it wasn't already an input.
1634 for (size_t input_count = 0; input_count < other->InputCount(); ++input_count) {
1635 HInstruction* other_input = other->InputAt(input_count);
1636 if (!has_input(this, other_input)) {
1637 AddInput(other_input);
1638 }
1639 }
1640
1641 other->GetBlock()->RemoveInstruction(other);
1642 }
1643
GetAssociatedAllocation(bool ignore_inputs)1644 HInstruction* HConstructorFence::GetAssociatedAllocation(bool ignore_inputs) {
1645 HInstruction* new_instance_inst = GetPrevious();
1646 // Check if the immediately preceding instruction is a new-instance/new-array.
1647 // Otherwise this fence is for protecting final fields.
1648 if (new_instance_inst != nullptr &&
1649 (new_instance_inst->IsNewInstance() || new_instance_inst->IsNewArray())) {
1650 if (ignore_inputs) {
1651 // If inputs are ignored, simply check if the predecessor is
1652 // *any* HNewInstance/HNewArray.
1653 //
1654 // Inputs are normally only ignored for prepare_for_register_allocation,
1655 // at which point *any* prior HNewInstance/Array can be considered
1656 // associated.
1657 return new_instance_inst;
1658 } else {
1659 // Normal case: There must be exactly 1 input and the previous instruction
1660 // must be that input.
1661 if (InputCount() == 1u && InputAt(0) == new_instance_inst) {
1662 return new_instance_inst;
1663 }
1664 }
1665 }
1666 return nullptr;
1667 }
1668
// Generate the trivial `Accept` implementation for every concrete instruction
// class: each one double-dispatches to the visitor's matching Visit##name.
#define DEFINE_ACCEPT(name, super)                                             \
  void H##name::Accept(HGraphVisitor* visitor) {                               \
    visitor->Visit##name(this);                                                \
  }

FOR_EACH_CONCRETE_INSTRUCTION(DEFINE_ACCEPT)

#undef DEFINE_ACCEPT
1677
1678 void HGraphVisitor::VisitInsertionOrder() {
1679 for (HBasicBlock* block : graph_->GetActiveBlocks()) {
1680 VisitBasicBlock(block);
1681 }
1682 }
1683
VisitReversePostOrder()1684 void HGraphVisitor::VisitReversePostOrder() {
1685 for (HBasicBlock* block : graph_->GetReversePostOrder()) {
1686 VisitBasicBlock(block);
1687 }
1688 }
1689
// Visits all phis first, then the non-phi instructions, matching the order
// in which they appear in the block.
void HGraphVisitor::VisitBasicBlock(HBasicBlock* block) {
  VisitPhis(block);
  VisitNonPhiInstructions(block);
}
1694
VisitPhis(HBasicBlock * block)1695 void HGraphVisitor::VisitPhis(HBasicBlock* block) {
1696 for (HInstructionIterator it(block->GetPhis()); !it.Done(); it.Advance()) {
1697 DCHECK(it.Current()->IsPhi());
1698 VisitPhi(it.Current()->AsPhi());
1699 }
1700 }
1701
VisitNonPhiInstructions(HBasicBlock * block)1702 void HGraphVisitor::VisitNonPhiInstructions(HBasicBlock* block) {
1703 for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
1704 DCHECK(!it.Current()->IsPhi());
1705 it.Current()->Accept(this);
1706 }
1707 }
1708
TryStaticEvaluation() const1709 HConstant* HTypeConversion::TryStaticEvaluation() const { return TryStaticEvaluation(GetInput()); }
1710
TryStaticEvaluation(HInstruction * input) const1711 HConstant* HTypeConversion::TryStaticEvaluation(HInstruction* input) const {
1712 HGraph* graph = input->GetBlock()->GetGraph();
1713 if (input->IsIntConstant()) {
1714 int32_t value = input->AsIntConstant()->GetValue();
1715 switch (GetResultType()) {
1716 case DataType::Type::kInt8:
1717 return graph->GetIntConstant(static_cast<int8_t>(value), GetDexPc());
1718 case DataType::Type::kUint8:
1719 return graph->GetIntConstant(static_cast<uint8_t>(value), GetDexPc());
1720 case DataType::Type::kInt16:
1721 return graph->GetIntConstant(static_cast<int16_t>(value), GetDexPc());
1722 case DataType::Type::kUint16:
1723 return graph->GetIntConstant(static_cast<uint16_t>(value), GetDexPc());
1724 case DataType::Type::kInt64:
1725 return graph->GetLongConstant(static_cast<int64_t>(value), GetDexPc());
1726 case DataType::Type::kFloat32:
1727 return graph->GetFloatConstant(static_cast<float>(value), GetDexPc());
1728 case DataType::Type::kFloat64:
1729 return graph->GetDoubleConstant(static_cast<double>(value), GetDexPc());
1730 default:
1731 return nullptr;
1732 }
1733 } else if (input->IsLongConstant()) {
1734 int64_t value = input->AsLongConstant()->GetValue();
1735 switch (GetResultType()) {
1736 case DataType::Type::kInt8:
1737 return graph->GetIntConstant(static_cast<int8_t>(value), GetDexPc());
1738 case DataType::Type::kUint8:
1739 return graph->GetIntConstant(static_cast<uint8_t>(value), GetDexPc());
1740 case DataType::Type::kInt16:
1741 return graph->GetIntConstant(static_cast<int16_t>(value), GetDexPc());
1742 case DataType::Type::kUint16:
1743 return graph->GetIntConstant(static_cast<uint16_t>(value), GetDexPc());
1744 case DataType::Type::kInt32:
1745 return graph->GetIntConstant(static_cast<int32_t>(value), GetDexPc());
1746 case DataType::Type::kFloat32:
1747 return graph->GetFloatConstant(static_cast<float>(value), GetDexPc());
1748 case DataType::Type::kFloat64:
1749 return graph->GetDoubleConstant(static_cast<double>(value), GetDexPc());
1750 default:
1751 return nullptr;
1752 }
1753 } else if (input->IsFloatConstant()) {
1754 float value = input->AsFloatConstant()->GetValue();
1755 switch (GetResultType()) {
1756 case DataType::Type::kInt32:
1757 if (std::isnan(value))
1758 return graph->GetIntConstant(0, GetDexPc());
1759 if (value >= static_cast<float>(kPrimIntMax))
1760 return graph->GetIntConstant(kPrimIntMax, GetDexPc());
1761 if (value <= kPrimIntMin)
1762 return graph->GetIntConstant(kPrimIntMin, GetDexPc());
1763 return graph->GetIntConstant(static_cast<int32_t>(value), GetDexPc());
1764 case DataType::Type::kInt64:
1765 if (std::isnan(value))
1766 return graph->GetLongConstant(0, GetDexPc());
1767 if (value >= static_cast<float>(kPrimLongMax))
1768 return graph->GetLongConstant(kPrimLongMax, GetDexPc());
1769 if (value <= kPrimLongMin)
1770 return graph->GetLongConstant(kPrimLongMin, GetDexPc());
1771 return graph->GetLongConstant(static_cast<int64_t>(value), GetDexPc());
1772 case DataType::Type::kFloat64:
1773 return graph->GetDoubleConstant(static_cast<double>(value), GetDexPc());
1774 default:
1775 return nullptr;
1776 }
1777 } else if (input->IsDoubleConstant()) {
1778 double value = input->AsDoubleConstant()->GetValue();
1779 switch (GetResultType()) {
1780 case DataType::Type::kInt32:
1781 if (std::isnan(value))
1782 return graph->GetIntConstant(0, GetDexPc());
1783 if (value >= kPrimIntMax)
1784 return graph->GetIntConstant(kPrimIntMax, GetDexPc());
1785 if (value <= kPrimLongMin)
1786 return graph->GetIntConstant(kPrimIntMin, GetDexPc());
1787 return graph->GetIntConstant(static_cast<int32_t>(value), GetDexPc());
1788 case DataType::Type::kInt64:
1789 if (std::isnan(value))
1790 return graph->GetLongConstant(0, GetDexPc());
1791 if (value >= static_cast<double>(kPrimLongMax))
1792 return graph->GetLongConstant(kPrimLongMax, GetDexPc());
1793 if (value <= kPrimLongMin)
1794 return graph->GetLongConstant(kPrimLongMin, GetDexPc());
1795 return graph->GetLongConstant(static_cast<int64_t>(value), GetDexPc());
1796 case DataType::Type::kFloat32:
1797 return graph->GetFloatConstant(static_cast<float>(value), GetDexPc());
1798 default:
1799 return nullptr;
1800 }
1801 }
1802 return nullptr;
1803 }
1804
TryStaticEvaluation() const1805 HConstant* HUnaryOperation::TryStaticEvaluation() const { return TryStaticEvaluation(GetInput()); }
1806
TryStaticEvaluation(HInstruction * input) const1807 HConstant* HUnaryOperation::TryStaticEvaluation(HInstruction* input) const {
1808 if (input->IsIntConstant()) {
1809 return Evaluate(input->AsIntConstant());
1810 } else if (input->IsLongConstant()) {
1811 return Evaluate(input->AsLongConstant());
1812 } else if (kEnableFloatingPointStaticEvaluation) {
1813 if (input->IsFloatConstant()) {
1814 return Evaluate(input->AsFloatConstant());
1815 } else if (input->IsDoubleConstant()) {
1816 return Evaluate(input->AsDoubleConstant());
1817 }
1818 }
1819 return nullptr;
1820 }
1821
TryStaticEvaluation() const1822 HConstant* HBinaryOperation::TryStaticEvaluation() const {
1823 return TryStaticEvaluation(GetLeft(), GetRight());
1824 }
1825
// Folds this binary operation for the given constant operands, returning the
// resulting constant or null when the operands are not foldable constants.
// Mixed (long, int) operands are allowed only for shifts/rotations, and
// (null, null) only for equality conditions, as asserted below.
HConstant* HBinaryOperation::TryStaticEvaluation(HInstruction* left, HInstruction* right) const {
  if (left->IsIntConstant() && right->IsIntConstant()) {
    return Evaluate(left->AsIntConstant(), right->AsIntConstant());
  } else if (left->IsLongConstant()) {
    if (right->IsIntConstant()) {
      // The binop(long, int) case is only valid for shifts and rotations.
      DCHECK(IsShl() || IsShr() || IsUShr() || IsRor()) << DebugName();
      return Evaluate(left->AsLongConstant(), right->AsIntConstant());
    } else if (right->IsLongConstant()) {
      return Evaluate(left->AsLongConstant(), right->AsLongConstant());
    }
  } else if (left->IsNullConstant() && right->IsNullConstant()) {
    // The binop(null, null) case is only valid for equal and not-equal conditions.
    DCHECK(IsEqual() || IsNotEqual()) << DebugName();
    return Evaluate(left->AsNullConstant(), right->AsNullConstant());
  } else if (kEnableFloatingPointStaticEvaluation) {
    if (left->IsFloatConstant() && right->IsFloatConstant()) {
      return Evaluate(left->AsFloatConstant(), right->AsFloatConstant());
    } else if (left->IsDoubleConstant() && right->IsDoubleConstant()) {
      return Evaluate(left->AsDoubleConstant(), right->AsDoubleConstant());
    }
  }
  return nullptr;
}
1850
GetConstantRight() const1851 HConstant* HBinaryOperation::GetConstantRight() const {
1852 if (GetRight()->IsConstant()) {
1853 return GetRight()->AsConstant();
1854 } else if (IsCommutative() && GetLeft()->IsConstant()) {
1855 return GetLeft()->AsConstant();
1856 } else {
1857 return nullptr;
1858 }
1859 }
1860
1861 // If `GetConstantRight()` returns one of the input, this returns the other
1862 // one. Otherwise it returns null.
GetLeastConstantLeft() const1863 HInstruction* HBinaryOperation::GetLeastConstantLeft() const {
1864 HInstruction* most_constant_right = GetConstantRight();
1865 if (most_constant_right == nullptr) {
1866 return nullptr;
1867 } else if (most_constant_right == GetLeft()) {
1868 return GetRight();
1869 } else {
1870 return GetLeft();
1871 }
1872 }
1873
std::ostream& operator<<(std::ostream& os, ComparisonBias rhs) {
  // TODO: Replace with auto-generated operator<<.
  switch (rhs) {
    case ComparisonBias::kNoBias:
      return os << "none";
    case ComparisonBias::kGtBias:
      return os << "gt";
    case ComparisonBias::kLtBias:
      return os << "lt";
  }
  // All enumerators are handled above; for a valid ComparisonBias control
  // never falls out of the switch.
}
1885
IsBeforeWhenDisregardMoves(HInstruction * instruction) const1886 bool HCondition::IsBeforeWhenDisregardMoves(HInstruction* instruction) const {
1887 return this == instruction->GetPreviousDisregardingMoves();
1888 }
1889
Equals(const HInstruction * other) const1890 bool HInstruction::Equals(const HInstruction* other) const {
1891 if (GetKind() != other->GetKind()) return false;
1892 if (GetType() != other->GetType()) return false;
1893 if (!InstructionDataEquals(other)) return false;
1894 HConstInputsRef inputs = GetInputs();
1895 HConstInputsRef other_inputs = other->GetInputs();
1896 if (inputs.size() != other_inputs.size()) return false;
1897 for (size_t i = 0; i != inputs.size(); ++i) {
1898 if (inputs[i] != other_inputs[i]) return false;
1899 }
1900
1901 DCHECK_EQ(ComputeHashCode(), other->ComputeHashCode());
1902 return true;
1903 }
1904
// Prints the instruction kind's name; the cases are generated from the
// concrete-instruction list macro.
std::ostream& operator<<(std::ostream& os, HInstruction::InstructionKind rhs) {
#define DECLARE_CASE(type, super) case HInstruction::k##type: os << #type; break;
  switch (rhs) {
    FOR_EACH_CONCRETE_INSTRUCTION(DECLARE_CASE)
    default:
      os << "Unknown instruction kind " << static_cast<int>(rhs);
      break;
  }
#undef DECLARE_CASE
  return os;
}
1916
operator <<(std::ostream & os,const HInstruction::NoArgsDump rhs)1917 std::ostream& operator<<(std::ostream& os, const HInstruction::NoArgsDump rhs) {
1918 // TODO Really this should be const but that would require const-ifying
1919 // graph-visualizer and HGraphVisitor which are tangled up everywhere.
1920 return const_cast<HInstruction*>(rhs.ins)->Dump(os, /* dump_args= */ false);
1921 }
1922
operator <<(std::ostream & os,const HInstruction::ArgsDump rhs)1923 std::ostream& operator<<(std::ostream& os, const HInstruction::ArgsDump rhs) {
1924 // TODO Really this should be const but that would require const-ifying
1925 // graph-visualizer and HGraphVisitor which are tangled up everywhere.
1926 return const_cast<HInstruction*>(rhs.ins)->Dump(os, /* dump_args= */ true);
1927 }
1928
operator <<(std::ostream & os,const HInstruction & rhs)1929 std::ostream& operator<<(std::ostream& os, const HInstruction& rhs) {
1930 return os << rhs.DumpWithoutArgs();
1931 }
1932
operator <<(std::ostream & os,const HUseList<HInstruction * > & lst)1933 std::ostream& operator<<(std::ostream& os, const HUseList<HInstruction*>& lst) {
1934 os << "Instructions[";
1935 bool first = true;
1936 for (const auto& hi : lst) {
1937 if (!first) {
1938 os << ", ";
1939 }
1940 first = false;
1941 os << hi.GetUser()->DebugName() << "[id: " << hi.GetUser()->GetId()
1942 << ", blk: " << hi.GetUser()->GetBlock()->GetBlockId() << "]@" << hi.GetIndex();
1943 }
1944 os << "]";
1945 return os;
1946 }
1947
operator <<(std::ostream & os,const HUseList<HEnvironment * > & lst)1948 std::ostream& operator<<(std::ostream& os, const HUseList<HEnvironment*>& lst) {
1949 os << "Environments[";
1950 bool first = true;
1951 for (const auto& hi : lst) {
1952 if (!first) {
1953 os << ", ";
1954 }
1955 first = false;
1956 os << *hi.GetUser()->GetHolder() << "@" << hi.GetIndex();
1957 }
1958 os << "]";
1959 return os;
1960 }
1961
Dump(std::ostream & os,CodeGenerator * codegen,std::optional<std::reference_wrapper<const BlockNamer>> namer)1962 std::ostream& HGraph::Dump(std::ostream& os,
1963 CodeGenerator* codegen,
1964 std::optional<std::reference_wrapper<const BlockNamer>> namer) {
1965 HGraphVisualizer vis(&os, this, codegen, namer);
1966 vis.DumpGraphDebug();
1967 return os;
1968 }
1969
// Unlinks `this` from its current position and re-links it immediately
// before `cursor`, adopting `cursor`'s block and updating the affected
// blocks' first-instruction pointers. `this` must not be the last
// instruction of its block (`next_` is dereferenced unconditionally).
void HInstruction::MoveBefore(HInstruction* cursor, bool do_checks) {
  if (do_checks) {
    DCHECK(!IsPhi());
    DCHECK(!IsControlFlow());
    DCHECK(CanBeMoved() ||
           // HShouldDeoptimizeFlag can only be moved by CHAGuardOptimization.
           IsShouldDeoptimizeFlag());
    DCHECK(!cursor->IsPhi());
  }

  // Detach from the old position.
  next_->previous_ = previous_;
  if (previous_ != nullptr) {
    previous_->next_ = next_;
  }
  if (block_->instructions_.first_instruction_ == this) {
    block_->instructions_.first_instruction_ = next_;
  }
  DCHECK_NE(block_->instructions_.last_instruction_, this);

  // Splice in right before `cursor` and adopt its block.
  previous_ = cursor->previous_;
  if (previous_ != nullptr) {
    previous_->next_ = this;
  }
  next_ = cursor;
  cursor->previous_ = this;
  block_ = cursor->block_;

  if (block_->instructions_.first_instruction_ == cursor) {
    block_->instructions_.first_instruction_ = this;
  }
}
2001
// Sinks a pure, input-free instruction as close as possible to its uses:
// to the common dominator of all using blocks, hoisted out of loops, placed
// just before its first user in that block (or before the block's control
// flow instruction when no user lives there).
void HInstruction::MoveBeforeFirstUserAndOutOfLoops() {
  DCHECK(!CanThrow());
  DCHECK(!HasSideEffects());
  DCHECK(!HasEnvironmentUses());
  DCHECK(HasNonEnvironmentUses());
  DCHECK(!IsPhi());  // Makes no sense for Phi.
  DCHECK_EQ(InputCount(), 0u);

  // Find the target block.
  auto uses_it = GetUses().begin();
  auto uses_end = GetUses().end();
  HBasicBlock* target_block = uses_it->GetUser()->GetBlock();
  ++uses_it;
  while (uses_it != uses_end && uses_it->GetUser()->GetBlock() == target_block) {
    ++uses_it;
  }
  if (uses_it != uses_end) {
    // This instruction has uses in two or more blocks. Find the common dominator.
    CommonDominator finder(target_block);
    for (; uses_it != uses_end; ++uses_it) {
      finder.Update(uses_it->GetUser()->GetBlock());
    }
    target_block = finder.Get();
    DCHECK(target_block != nullptr);
  }
  // Move to the first dominator not in a loop.
  while (target_block->IsInLoop()) {
    target_block = target_block->GetDominator();
    DCHECK(target_block != nullptr);
  }

  // Find insertion position: the earliest user within `target_block`.
  HInstruction* insert_pos = nullptr;
  for (const HUseListNode<HInstruction*>& use : GetUses()) {
    if (use.GetUser()->GetBlock() == target_block &&
        (insert_pos == nullptr || use.GetUser()->StrictlyDominates(insert_pos))) {
      insert_pos = use.GetUser();
    }
  }
  if (insert_pos == nullptr) {
    // No user in `target_block`, insert before the control flow instruction.
    insert_pos = target_block->GetLastInstruction();
    DCHECK(insert_pos->IsControlFlow());
    // Avoid splitting HCondition from HIf to prevent unnecessary materialization.
    if (insert_pos->IsIf()) {
      HInstruction* if_input = insert_pos->AsIf()->InputAt(0);
      if (if_input == insert_pos->GetPrevious()) {
        insert_pos = if_input;
      }
    }
  }
  MoveBefore(insert_pos);
}
2055
// Splits this block in two at `cursor`: instructions from `cursor` onwards
// move into a new successor block, this block's successors transfer to the
// new block, and this block is terminated with a HGoto to the new block.
// Returns the newly created block.
HBasicBlock* HBasicBlock::SplitBefore(HInstruction* cursor, bool require_graph_not_in_ssa_form) {
  DCHECK_IMPLIES(require_graph_not_in_ssa_form, !graph_->IsInSsaForm())
      << "Support for SSA form not implemented.";
  DCHECK_EQ(cursor->GetBlock(), this);

  HBasicBlock* new_block =
      new (GetGraph()->GetAllocator()) HBasicBlock(GetGraph(), cursor->GetDexPc());
  // Move the instruction list tail [cursor, last] into the new block.
  new_block->instructions_.first_instruction_ = cursor;
  new_block->instructions_.last_instruction_ = instructions_.last_instruction_;
  instructions_.last_instruction_ = cursor->previous_;
  if (cursor->previous_ == nullptr) {
    instructions_.first_instruction_ = nullptr;
  } else {
    cursor->previous_->next_ = nullptr;
    cursor->previous_ = nullptr;
  }

  new_block->instructions_.SetBlockOfInstructions(new_block);
  AddInstruction(new (GetGraph()->GetAllocator()) HGoto(new_block->GetDexPc()));

  // Transfer the successor edges to the new block.
  for (HBasicBlock* successor : GetSuccessors()) {
    successor->predecessors_[successor->GetPredecessorIndexOf(this)] = new_block;
  }
  new_block->successors_.swap(successors_);
  DCHECK(successors_.empty());
  AddSuccessor(new_block);

  GetGraph()->AddBlock(new_block);
  return new_block;
}
2086
// Creates a new empty block that takes over all of this block's predecessor
// edges and becomes this block's single predecessor. Returns the new block.
HBasicBlock* HBasicBlock::CreateImmediateDominator() {
  DCHECK(!graph_->IsInSsaForm()) << "Support for SSA form not implemented.";
  DCHECK(!IsCatchBlock()) << "Support for updating try/catch information not implemented.";

  HBasicBlock* new_block = new (GetGraph()->GetAllocator()) HBasicBlock(GetGraph(), GetDexPc());

  // Re-route every predecessor's successor edge to the new block.
  for (HBasicBlock* predecessor : GetPredecessors()) {
    predecessor->successors_[predecessor->GetSuccessorIndexOf(this)] = new_block;
  }
  new_block->predecessors_.swap(predecessors_);
  DCHECK(predecessors_.empty());
  AddPredecessor(new_block);

  GetGraph()->AddBlock(new_block);
  return new_block;
}
2103
// Splits this block at `cursor` for inlining: like SplitBefore, but does not
// add a HGoto, does not link this block to the new one, and also transfers
// the dominated-blocks information. The new block is not added to the graph.
HBasicBlock* HBasicBlock::SplitBeforeForInlining(HInstruction* cursor) {
  DCHECK_EQ(cursor->GetBlock(), this);

  HBasicBlock* new_block =
      new (GetGraph()->GetAllocator()) HBasicBlock(GetGraph(), cursor->GetDexPc());
  // Move the instruction list tail [cursor, last] into the new block.
  new_block->instructions_.first_instruction_ = cursor;
  new_block->instructions_.last_instruction_ = instructions_.last_instruction_;
  instructions_.last_instruction_ = cursor->previous_;
  if (cursor->previous_ == nullptr) {
    instructions_.first_instruction_ = nullptr;
  } else {
    cursor->previous_->next_ = nullptr;
    cursor->previous_ = nullptr;
  }

  new_block->instructions_.SetBlockOfInstructions(new_block);

  // Transfer the successor edges to the new block.
  for (HBasicBlock* successor : GetSuccessors()) {
    successor->predecessors_[successor->GetPredecessorIndexOf(this)] = new_block;
  }
  new_block->successors_.swap(successors_);
  DCHECK(successors_.empty());

  // Transfer the dominator-tree children to the new block.
  for (HBasicBlock* dominated : GetDominatedBlocks()) {
    dominated->dominator_ = new_block;
  }
  new_block->dominated_blocks_.swap(dominated_blocks_);
  DCHECK(dominated_blocks_.empty());
  return new_block;
}
2134
// Splits this block right after `cursor` for inlining: the instructions
// following `cursor` move to a new block, together with the successor edges
// and dominated-blocks information. `cursor` must not be the last or a
// control-flow instruction. The new block is not added to the graph.
HBasicBlock* HBasicBlock::SplitAfterForInlining(HInstruction* cursor) {
  DCHECK(!cursor->IsControlFlow());
  DCHECK_NE(instructions_.last_instruction_, cursor);
  DCHECK_EQ(cursor->GetBlock(), this);

  HBasicBlock* new_block = new (GetGraph()->GetAllocator()) HBasicBlock(GetGraph(), GetDexPc());
  // Move the instruction list tail (cursor, last] into the new block.
  new_block->instructions_.first_instruction_ = cursor->GetNext();
  new_block->instructions_.last_instruction_ = instructions_.last_instruction_;
  cursor->next_->previous_ = nullptr;
  cursor->next_ = nullptr;
  instructions_.last_instruction_ = cursor;

  new_block->instructions_.SetBlockOfInstructions(new_block);
  // Transfer the successor edges to the new block.
  for (HBasicBlock* successor : GetSuccessors()) {
    successor->predecessors_[successor->GetPredecessorIndexOf(this)] = new_block;
  }
  new_block->successors_.swap(successors_);
  DCHECK(successors_.empty());

  // Transfer the dominator-tree children to the new block.
  for (HBasicBlock* dominated : GetDominatedBlocks()) {
    dominated->dominator_ = new_block;
  }
  new_block->dominated_blocks_.swap(dominated_blocks_);
  DCHECK(dominated_blocks_.empty());
  return new_block;
}
2161
// Returns the try-entry boundary covering this block's successors, or null
// when the successors are not inside a try: a block ending with an entry
// TryBoundary yields that boundary; a block ending with an exit TryBoundary
// yields null; otherwise the block's own try entry (if any) applies.
const HTryBoundary* HBasicBlock::ComputeTryEntryOfSuccessors() const {
  if (EndsWithTryBoundary()) {
    HTryBoundary* try_boundary = GetLastInstruction()->AsTryBoundary();
    if (try_boundary->IsEntry()) {
      DCHECK(!IsTryBlock());
      return try_boundary;
    } else {
      DCHECK(IsTryBlock());
      DCHECK(try_catch_information_->GetTryEntry().HasSameExceptionHandlersAs(*try_boundary));
      return nullptr;
    }
  } else if (IsTryBlock()) {
    return &try_catch_information_->GetTryEntry();
  } else {
    return nullptr;
  }
}
2179
HasThrowingInstructions() const2180 bool HBasicBlock::HasThrowingInstructions() const {
2181 for (HInstructionIterator it(GetInstructions()); !it.Done(); it.Advance()) {
2182 if (it.Current()->CanThrow()) {
2183 return true;
2184 }
2185 }
2186 return false;
2187 }
2188
HasOnlyOneInstruction(const HBasicBlock & block)2189 static bool HasOnlyOneInstruction(const HBasicBlock& block) {
2190 return block.GetPhis().IsEmpty()
2191 && !block.GetInstructions().IsEmpty()
2192 && block.GetFirstInstruction() == block.GetLastInstruction();
2193 }
2194
IsSingleGoto() const2195 bool HBasicBlock::IsSingleGoto() const {
2196 return HasOnlyOneInstruction(*this) && GetLastInstruction()->IsGoto();
2197 }
2198
IsSingleReturn() const2199 bool HBasicBlock::IsSingleReturn() const {
2200 return HasOnlyOneInstruction(*this) && GetLastInstruction()->IsReturn();
2201 }
2202
IsSingleReturnOrReturnVoidAllowingPhis() const2203 bool HBasicBlock::IsSingleReturnOrReturnVoidAllowingPhis() const {
2204 return (GetFirstInstruction() == GetLastInstruction()) &&
2205 (GetLastInstruction()->IsReturn() || GetLastInstruction()->IsReturnVoid());
2206 }
2207
IsSingleTryBoundary() const2208 bool HBasicBlock::IsSingleTryBoundary() const {
2209 return HasOnlyOneInstruction(*this) && GetLastInstruction()->IsTryBoundary();
2210 }
2211
EndsWithControlFlowInstruction() const2212 bool HBasicBlock::EndsWithControlFlowInstruction() const {
2213 return !GetInstructions().IsEmpty() && GetLastInstruction()->IsControlFlow();
2214 }
2215
EndsWithReturn() const2216 bool HBasicBlock::EndsWithReturn() const {
2217 return !GetInstructions().IsEmpty() &&
2218 (GetLastInstruction()->IsReturn() || GetLastInstruction()->IsReturnVoid());
2219 }
2220
EndsWithIf() const2221 bool HBasicBlock::EndsWithIf() const {
2222 return !GetInstructions().IsEmpty() && GetLastInstruction()->IsIf();
2223 }
2224
EndsWithTryBoundary() const2225 bool HBasicBlock::EndsWithTryBoundary() const {
2226 return !GetInstructions().IsEmpty() && GetLastInstruction()->IsTryBoundary();
2227 }
2228
HasSinglePhi() const2229 bool HBasicBlock::HasSinglePhi() const {
2230 return !GetPhis().IsEmpty() && GetFirstPhi()->GetNext() == nullptr;
2231 }
2232
GetNormalSuccessors() const2233 ArrayRef<HBasicBlock* const> HBasicBlock::GetNormalSuccessors() const {
2234 if (EndsWithTryBoundary()) {
2235 // The normal-flow successor of HTryBoundary is always stored at index zero.
2236 DCHECK_EQ(successors_[0], GetLastInstruction()->AsTryBoundary()->GetNormalFlowSuccessor());
2237 return ArrayRef<HBasicBlock* const>(successors_).SubArray(0u, 1u);
2238 } else {
2239 // All successors of blocks not ending with TryBoundary are normal.
2240 return ArrayRef<HBasicBlock* const>(successors_);
2241 }
2242 }
2243
GetExceptionalSuccessors() const2244 ArrayRef<HBasicBlock* const> HBasicBlock::GetExceptionalSuccessors() const {
2245 if (EndsWithTryBoundary()) {
2246 return GetLastInstruction()->AsTryBoundary()->GetExceptionHandlers();
2247 } else {
2248 // Blocks not ending with TryBoundary do not have exceptional successors.
2249 return ArrayRef<HBasicBlock* const>();
2250 }
2251 }
2252
HasSameExceptionHandlersAs(const HTryBoundary & other) const2253 bool HTryBoundary::HasSameExceptionHandlersAs(const HTryBoundary& other) const {
2254 ArrayRef<HBasicBlock* const> handlers1 = GetExceptionHandlers();
2255 ArrayRef<HBasicBlock* const> handlers2 = other.GetExceptionHandlers();
2256
2257 size_t length = handlers1.size();
2258 if (length != handlers2.size()) {
2259 return false;
2260 }
2261
2262 // Exception handlers need to be stored in the same order.
2263 for (size_t i = 0; i < length; ++i) {
2264 if (handlers1[i] != handlers2[i]) {
2265 return false;
2266 }
2267 }
2268 return true;
2269 }
2270
CountSize() const2271 size_t HInstructionList::CountSize() const {
2272 size_t size = 0;
2273 HInstruction* current = first_instruction_;
2274 for (; current != nullptr; current = current->GetNext()) {
2275 size++;
2276 }
2277 return size;
2278 }
2279
SetBlockOfInstructions(HBasicBlock * block) const2280 void HInstructionList::SetBlockOfInstructions(HBasicBlock* block) const {
2281 for (HInstruction* current = first_instruction_;
2282 current != nullptr;
2283 current = current->GetNext()) {
2284 current->SetBlock(block);
2285 }
2286 }
2287
// Splices all of `instruction_list` into this list right after `cursor`.
// The donor list's nodes are linked in place; its first/last pointers are
// left untouched (the caller is expected to discard it).
void HInstructionList::AddAfter(HInstruction* cursor, const HInstructionList& instruction_list) {
  DCHECK(Contains(cursor));
  if (!instruction_list.IsEmpty()) {
    if (cursor == last_instruction_) {
      last_instruction_ = instruction_list.last_instruction_;
    } else {
      cursor->next_->previous_ = instruction_list.last_instruction_;
    }
    instruction_list.last_instruction_->next_ = cursor->next_;
    cursor->next_ = instruction_list.first_instruction_;
    instruction_list.first_instruction_->previous_ = cursor;
  }
}
2301
// Splices all of `instruction_list` into this list right before `cursor`.
// The donor list's nodes are linked in place; its first/last pointers are
// left untouched (the caller is expected to discard it).
void HInstructionList::AddBefore(HInstruction* cursor, const HInstructionList& instruction_list) {
  DCHECK(Contains(cursor));
  if (!instruction_list.IsEmpty()) {
    if (cursor == first_instruction_) {
      first_instruction_ = instruction_list.first_instruction_;
    } else {
      cursor->previous_->next_ = instruction_list.first_instruction_;
    }
    instruction_list.last_instruction_->next_ = cursor;
    instruction_list.first_instruction_->previous_ = cursor->previous_;
    cursor->previous_ = instruction_list.last_instruction_;
  }
}
2315
Add(const HInstructionList & instruction_list)2316 void HInstructionList::Add(const HInstructionList& instruction_list) {
2317 if (IsEmpty()) {
2318 first_instruction_ = instruction_list.first_instruction_;
2319 last_instruction_ = instruction_list.last_instruction_;
2320 } else {
2321 AddAfter(last_instruction_, instruction_list);
2322 }
2323 }
2324
void HBasicBlock::DisconnectAndDelete() {
  // Fully removes this (dead) block from the graph: unlinks it from its
  // successors and predecessors, deletes its instructions and phis, removes it
  // from loop information and the dominator tree, and finally deletes it from
  // the graph itself.
  //
  // Dominators must be removed after all the blocks they dominate. This way
  // a loop header is removed last, a requirement for correct loop information
  // iteration.
  DCHECK(dominated_blocks_.empty());

  // The following steps gradually remove the block from all its dependants in
  // post order (b/27683071).

  // (1) Store a basic block that we'll use in step (5) to find loops to be updated.
  //     We need to do this before step (4) which destroys the predecessor list.
  HBasicBlock* loop_update_start = this;
  if (IsLoopHeader()) {
    HLoopInformation* loop_info = GetLoopInformation();
    // All other blocks in this loop should have been removed because the header
    // was their dominator.
    // Note that we do not remove `this` from `loop_info` as it is unreachable.
    DCHECK(!loop_info->IsIrreducible());
    DCHECK_EQ(loop_info->GetBlocks().NumSetBits(), 1u);
    DCHECK_EQ(static_cast<uint32_t>(loop_info->GetBlocks().GetHighestBitSet()), GetBlockId());
    loop_update_start = loop_info->GetPreHeader();
  }

  // (2) Disconnect the block from its successors and update their phis.
  DisconnectFromSuccessors();

  // (3) Remove instructions and phis. Instructions should have no remaining uses
  //     except in catch phis. If an instruction is used by a catch phi at `index`,
  //     remove `index`-th input of all phis in the catch block since they are
  //     guaranteed dead. Note that we may miss dead inputs this way but the
  //     graph will always remain consistent.
  RemoveCatchPhiUsesAndInstruction(/* building_dominator_tree = */ false);

  // (4) Disconnect the block from its predecessors and update their
  //     control-flow instructions.
  for (HBasicBlock* predecessor : predecessors_) {
    // We should not see any back edges as they would have been removed by step (3).
    DCHECK_IMPLIES(IsInLoop(), !GetLoopInformation()->IsBackEdge(*predecessor));

    HInstruction* last_instruction = predecessor->GetLastInstruction();
    if (last_instruction->IsTryBoundary() && !IsCatchBlock()) {
      // This block is the only normal-flow successor of the TryBoundary which
      // makes `predecessor` dead. Since DCE removes blocks in post order,
      // exception handlers of this TryBoundary were already visited and any
      // remaining handlers therefore must be live. We remove `predecessor` from
      // their list of predecessors.
      DCHECK_EQ(last_instruction->AsTryBoundary()->GetNormalFlowSuccessor(), this);
      while (predecessor->GetSuccessors().size() > 1) {
        HBasicBlock* handler = predecessor->GetSuccessors()[1];
        DCHECK(handler->IsCatchBlock());
        predecessor->RemoveSuccessor(handler);
        handler->RemovePredecessor(predecessor);
      }
    }

    predecessor->RemoveSuccessor(this);
    uint32_t num_pred_successors = predecessor->GetSuccessors().size();
    if (num_pred_successors == 1u) {
      // If we have one successor after removing one, then we must have
      // had an HIf, HPackedSwitch or HTryBoundary, as they have more than one
      // successor. Replace those with a HGoto.
      DCHECK(last_instruction->IsIf() ||
             last_instruction->IsPackedSwitch() ||
             (last_instruction->IsTryBoundary() && IsCatchBlock()));
      predecessor->RemoveInstruction(last_instruction);
      predecessor->AddInstruction(new (graph_->GetAllocator()) HGoto(last_instruction->GetDexPc()));
    } else if (num_pred_successors == 0u) {
      // The predecessor has no remaining successors and therefore must be dead.
      // We deliberately leave it without a control-flow instruction so that the
      // GraphChecker fails unless it is also removed during the pass.
      predecessor->RemoveInstruction(last_instruction);
    } else {
      // There are multiple successors left. The removed block might be a successor
      // of a PackedSwitch which will be completely removed (perhaps replaced with
      // a Goto), or we are deleting a catch block from a TryBoundary. In either
      // case, leave `last_instruction` as is for now.
      DCHECK(last_instruction->IsPackedSwitch() ||
             (last_instruction->IsTryBoundary() && IsCatchBlock()));
    }
  }
  predecessors_.clear();

  // (5) Remove the block from all loops it is included in. Skip the inner-most
  //     loop if this is the loop header (see definition of `loop_update_start`)
  //     because the loop header's predecessor list has been destroyed in step (4).
  for (HLoopInformationOutwardIterator it(*loop_update_start); !it.Done(); it.Advance()) {
    HLoopInformation* loop_info = it.Current();
    loop_info->Remove(this);
    if (loop_info->IsBackEdge(*this)) {
      // If this was the last back edge of the loop, we deliberately leave the
      // loop in an inconsistent state and will fail GraphChecker unless the
      // entire loop is removed during the pass.
      loop_info->RemoveBackEdge(this);
    }
  }

  // (6) Disconnect from the dominator.
  dominator_->RemoveDominatedBlock(this);
  SetDominator(nullptr);

  // (7) Delete from the graph, update reverse post order.
  graph_->DeleteDeadEmptyBlock(this);
  SetGraph(nullptr);
}
2429
void HBasicBlock::DisconnectFromSuccessors(const ArenaBitVector* visited) {
  // Removes this block from the predecessor list of every successor, and fixes
  // up the successors' phis accordingly. If `visited` is non-null, it marks the
  // blocks known to be live; phis of successors outside that set are not
  // updated since those successors are dead as well.
  for (HBasicBlock* successor : successors_) {
    // Delete this block from the list of predecessors.
    size_t this_index = successor->GetPredecessorIndexOf(this);
    successor->predecessors_.erase(successor->predecessors_.begin() + this_index);

    if (visited != nullptr && !visited->IsBitSet(successor->GetBlockId())) {
      // `successor` itself is dead. Therefore, there is no need to update its phis.
      continue;
    }

    DCHECK(!successor->predecessors_.empty());

    // Remove this block's entries in the successor's phis. Skips exceptional
    // successors because catch phi inputs do not correspond to predecessor
    // blocks but throwing instructions. They are removed in `RemoveCatchPhiUses`.
    if (!successor->IsCatchBlock()) {
      if (successor->predecessors_.size() == 1u) {
        // The successor has just one predecessor left. Replace phis with the only
        // remaining input. (The phi had two inputs, at indices `this_index` and
        // `1 - this_index`; the latter is the survivor.)
        for (HInstructionIterator phi_it(successor->GetPhis()); !phi_it.Done(); phi_it.Advance()) {
          HPhi* phi = phi_it.Current()->AsPhi();
          phi->ReplaceWith(phi->InputAt(1 - this_index));
          successor->RemovePhi(phi);
        }
      } else {
        for (HInstructionIterator phi_it(successor->GetPhis()); !phi_it.Done(); phi_it.Advance()) {
          phi_it.Current()->AsPhi()->RemoveInputAt(this_index);
        }
      }
    }
  }
  successors_.clear();
}
2464
void HBasicBlock::RemoveCatchPhiUsesAndInstruction(bool building_dominator_tree) {
  // Removes all instructions and phis of this (dead) block, after first
  // deleting the catch-phi inputs that reference them. Instructions are
  // iterated backwards so each definition is removed only after any of its
  // users within the same block.
  for (HBackwardInstructionIterator it(GetInstructions()); !it.Done(); it.Advance()) {
    HInstruction* insn = it.Current();
    RemoveCatchPhiUsesOfDeadInstruction(insn);

    // If we are building the dominator tree, we removed all input records previously.
    // `RemoveInstruction` will try to remove them again but that's not something we support and we
    // will crash. We check here since we won't be checking that in RemoveInstruction.
    if (building_dominator_tree) {
      DCHECK(insn->GetUses().empty());
      DCHECK(insn->GetEnvUses().empty());
    }
    RemoveInstruction(insn, /* ensure_safety= */ !building_dominator_tree);
  }
  for (HInstructionIterator it(GetPhis()); !it.Done(); it.Advance()) {
    HPhi* insn = it.Current()->AsPhi();
    RemoveCatchPhiUsesOfDeadInstruction(insn);

    // If we are building the dominator tree, we removed all input records previously.
    // `RemovePhi` will try to remove them again but that's not something we support and we
    // will crash. We check here since we won't be checking that in RemovePhi.
    if (building_dominator_tree) {
      DCHECK(insn->GetUses().empty());
      DCHECK(insn->GetEnvUses().empty());
    }
    RemovePhi(insn, /* ensure_safety= */ !building_dominator_tree);
  }
}
2493
MergeInstructionsWith(HBasicBlock * other)2494 void HBasicBlock::MergeInstructionsWith(HBasicBlock* other) {
2495 DCHECK(EndsWithControlFlowInstruction());
2496 RemoveInstruction(GetLastInstruction());
2497 instructions_.Add(other->GetInstructions());
2498 other->instructions_.SetBlockOfInstructions(this);
2499 other->instructions_.Clear();
2500 }
2501
void HBasicBlock::MergeWith(HBasicBlock* other) {
  // Merges the single successor `other` into this block: its instructions,
  // loop membership, successor edges and dominated blocks are transferred
  // here, and `other` is deleted from the graph.
  DCHECK_EQ(GetGraph(), other->GetGraph());
  DCHECK(ContainsElement(dominated_blocks_, other));
  DCHECK_EQ(GetSingleSuccessor(), other);
  DCHECK_EQ(other->GetSinglePredecessor(), this);
  DCHECK(other->GetPhis().IsEmpty());

  // Move instructions from `other` to `this`.
  MergeInstructionsWith(other);

  // Remove `other` from the loops it is included in.
  for (HLoopInformationOutwardIterator it(*other); !it.Done(); it.Advance()) {
    HLoopInformation* loop_info = it.Current();
    loop_info->Remove(other);
    if (loop_info->IsBackEdge(*other)) {
      // This block takes over `other`'s role as back edge of the loop.
      loop_info->ReplaceBackEdge(other, this);
    }
  }

  // Update links to the successors of `other`.
  successors_.clear();
  for (HBasicBlock* successor : other->GetSuccessors()) {
    successor->predecessors_[successor->GetPredecessorIndexOf(other)] = this;
  }
  successors_.swap(other->successors_);
  DCHECK(other->successors_.empty());

  // Update the dominator tree.
  RemoveDominatedBlock(other);
  for (HBasicBlock* dominated : other->GetDominatedBlocks()) {
    dominated->SetDominator(this);
  }
  dominated_blocks_.insert(
      dominated_blocks_.end(), other->dominated_blocks_.begin(), other->dominated_blocks_.end());
  other->dominated_blocks_.clear();
  other->dominator_ = nullptr;

  // Clear the list of predecessors of `other` in preparation of deleting it.
  other->predecessors_.clear();

  // Delete `other` from the graph. The function updates reverse post order.
  graph_->DeleteDeadEmptyBlock(other);
  other->SetGraph(nullptr);
}
2546
void HBasicBlock::MergeWithInlined(HBasicBlock* other) {
  // Variant of `MergeWith` used during inlining: `other` belongs to the callee
  // graph (hence the DCHECK_NE on graphs), is not in any loop, and is left for
  // the caller to delete; only its contents and links are transferred here.
  DCHECK_NE(GetGraph(), other->GetGraph());
  DCHECK(GetDominatedBlocks().empty());
  DCHECK(GetSuccessors().empty());
  DCHECK(!EndsWithControlFlowInstruction());
  DCHECK(other->GetSinglePredecessor()->IsEntryBlock());
  DCHECK(other->GetPhis().IsEmpty());
  DCHECK(!other->IsInLoop());

  // Move instructions from `other` to `this`.
  instructions_.Add(other->GetInstructions());
  other->instructions_.SetBlockOfInstructions(this);

  // Update links to the successors of `other`.
  successors_.clear();
  for (HBasicBlock* successor : other->GetSuccessors()) {
    successor->predecessors_[successor->GetPredecessorIndexOf(other)] = this;
  }
  successors_.swap(other->successors_);
  DCHECK(other->successors_.empty());

  // Update the dominator tree.
  for (HBasicBlock* dominated : other->GetDominatedBlocks()) {
    dominated->SetDominator(this);
  }
  dominated_blocks_.insert(
      dominated_blocks_.end(), other->dominated_blocks_.begin(), other->dominated_blocks_.end());
  other->dominated_blocks_.clear();
  other->dominator_ = nullptr;
  other->graph_ = nullptr;
}
2578
ReplaceWith(HBasicBlock * other)2579 void HBasicBlock::ReplaceWith(HBasicBlock* other) {
2580 while (!GetPredecessors().empty()) {
2581 HBasicBlock* predecessor = GetPredecessors()[0];
2582 predecessor->ReplaceSuccessor(this, other);
2583 }
2584 while (!GetSuccessors().empty()) {
2585 HBasicBlock* successor = GetSuccessors()[0];
2586 successor->ReplacePredecessor(this, other);
2587 }
2588 for (HBasicBlock* dominated : GetDominatedBlocks()) {
2589 other->AddDominatedBlock(dominated);
2590 }
2591 GetDominator()->ReplaceDominatedBlock(this, other);
2592 other->SetDominator(GetDominator());
2593 dominator_ = nullptr;
2594 graph_ = nullptr;
2595 }
2596
DeleteDeadEmptyBlock(HBasicBlock * block)2597 void HGraph::DeleteDeadEmptyBlock(HBasicBlock* block) {
2598 DCHECK_EQ(block->GetGraph(), this);
2599 DCHECK(block->GetSuccessors().empty());
2600 DCHECK(block->GetPredecessors().empty());
2601 DCHECK(block->GetDominatedBlocks().empty());
2602 DCHECK(block->GetDominator() == nullptr);
2603 DCHECK(block->GetInstructions().IsEmpty());
2604 DCHECK(block->GetPhis().IsEmpty());
2605
2606 if (block->IsExitBlock()) {
2607 SetExitBlock(nullptr);
2608 }
2609
2610 RemoveElement(reverse_post_order_, block);
2611 blocks_[block->GetBlockId()] = nullptr;
2612 block->SetGraph(nullptr);
2613 }
2614
void HGraph::UpdateLoopAndTryInformationOfNewBlock(HBasicBlock* block,
                                                   HBasicBlock* reference,
                                                   bool replace_if_back_edge,
                                                   bool has_more_specific_try_catch_info) {
  // Makes `block`, newly added to this graph, consistent with the loop and
  // try/catch information of `reference` (a pre-existing block at the same
  // position in the CFG).
  if (block->IsLoopHeader()) {
    // Clear the information of which blocks are contained in that loop. Since the
    // information is stored as a bit vector based on block ids, we have to update
    // it, as those block ids were specific to the callee graph and we are now adding
    // these blocks to the caller graph.
    block->GetLoopInformation()->ClearAllBlocks();
  }

  // If not already in a loop, update the loop information.
  if (!block->IsInLoop()) {
    block->SetLoopInformation(reference->GetLoopInformation());
  }

  // If the block is in a loop, update all its outward loops.
  HLoopInformation* loop_info = block->GetLoopInformation();
  if (loop_info != nullptr) {
    for (HLoopInformationOutwardIterator loop_it(*block);
         !loop_it.Done();
         loop_it.Advance()) {
      loop_it.Current()->Add(block);
    }
    if (replace_if_back_edge && loop_info->IsBackEdge(*reference)) {
      // `block` takes over `reference`'s role as back edge of its inner loop.
      loop_info->ReplaceBackEdge(reference, block);
    }
  }

  DCHECK_IMPLIES(has_more_specific_try_catch_info, !reference->IsTryBlock())
      << "We don't allow to inline try catches inside of other try blocks.";

  // Update the TryCatchInformation, if we are not inlining a try catch.
  if (!has_more_specific_try_catch_info) {
    // Copy TryCatchInformation if `reference` is a try block, not if it is a catch block.
    TryCatchInformation* try_catch_info =
        reference->IsTryBlock() ? reference->GetTryCatchInformation() : nullptr;
    block->SetTryCatchInformation(try_catch_info);
  }
}
2656
HInstruction* HGraph::InlineInto(HGraph* outer_graph, HInvoke* invoke) {
  // Splices this (callee) graph into `outer_graph` at the position of
  // `invoke`, and returns the instruction producing the call's return value
  // (nullptr when the callee returns void).
  DCHECK(HasExitBlock()) << "Unimplemented scenario";
  // Update the environments in this graph to have the invoke's environment
  // as parent.
  {
    // Skip the entry block, we do not need to update the entry's suspend check.
    for (HBasicBlock* block : GetReversePostOrderSkipEntryBlock()) {
      for (HInstructionIterator instr_it(block->GetInstructions());
           !instr_it.Done();
           instr_it.Advance()) {
        HInstruction* current = instr_it.Current();
        if (current->NeedsEnvironment()) {
          DCHECK(current->HasEnvironment());
          current->GetEnvironment()->SetAndCopyParentChain(
              outer_graph->GetAllocator(), invoke->GetEnvironment());
        }
      }
    }
  }
  outer_graph->UpdateMaximumNumberOfOutVRegs(GetMaximumNumberOfOutVRegs());

  // Propagate the callee's graph-wide flags to the caller graph.
  if (HasBoundsChecks()) {
    outer_graph->SetHasBoundsChecks(true);
  }
  if (HasLoops()) {
    outer_graph->SetHasLoops(true);
  }
  if (HasIrreducibleLoops()) {
    outer_graph->SetHasIrreducibleLoops(true);
  }
  if (HasDirectCriticalNativeCall()) {
    outer_graph->SetHasDirectCriticalNativeCall(true);
  }
  if (HasTryCatch()) {
    outer_graph->SetHasTryCatch(true);
  }
  if (HasMonitorOperations()) {
    outer_graph->SetHasMonitorOperations(true);
  }
  if (HasTraditionalSIMD()) {
    outer_graph->SetHasTraditionalSIMD(true);
  }
  if (HasPredicatedSIMD()) {
    outer_graph->SetHasPredicatedSIMD(true);
  }
  if (HasAlwaysThrowingInvokes()) {
    outer_graph->SetHasAlwaysThrowingInvokes(true);
  }

  HInstruction* return_value = nullptr;
  if (GetBlocks().size() == 3) {
    // Inliner already made sure we don't inline methods that always throw.
    DCHECK(!GetBlocks()[1]->GetLastInstruction()->IsThrow());
    // Simple case of an entry block, a body block, and an exit block.
    // Put the body block's instruction into `invoke`'s block.
    HBasicBlock* body = GetBlocks()[1];
    DCHECK(GetBlocks()[0]->IsEntryBlock());
    DCHECK(GetBlocks()[2]->IsExitBlock());
    DCHECK(!body->IsExitBlock());
    DCHECK(!body->IsInLoop());
    HInstruction* last = body->GetLastInstruction();

    // Note that we add instructions before the invoke only to simplify polymorphic inlining.
    invoke->GetBlock()->instructions_.AddBefore(invoke, body->GetInstructions());
    body->GetInstructions().SetBlockOfInstructions(invoke->GetBlock());

    // Replace the invoke with the return value of the inlined graph.
    if (last->IsReturn()) {
      return_value = last->InputAt(0);
    } else {
      DCHECK(last->IsReturnVoid());
    }

    invoke->GetBlock()->RemoveInstruction(last);
  } else {
    // Need to inline multiple blocks. We split `invoke`'s block
    // into two blocks, merge the first block of the inlined graph into
    // the first half, and replace the exit block of the inlined graph
    // with the second half.
    ArenaAllocator* allocator = outer_graph->GetAllocator();
    HBasicBlock* at = invoke->GetBlock();
    // Note that we split before the invoke only to simplify polymorphic inlining.
    HBasicBlock* to = at->SplitBeforeForInlining(invoke);

    HBasicBlock* first = entry_block_->GetSuccessors()[0];
    DCHECK(!first->IsInLoop());
    DCHECK(first->GetTryCatchInformation() == nullptr);
    at->MergeWithInlined(first);
    exit_block_->ReplaceWith(to);

    // Update the meta information surrounding blocks:
    // (1) the graph they are now in,
    // (2) the reverse post order of that graph,
    // (3) their potential loop information, inner and outer,
    // (4) try block membership.
    // Note that we do not need to update catch phi inputs because they
    // correspond to the register file of the outer method which the inlinee
    // cannot modify.

    // We don't add the entry block, the exit block, and the first block, which
    // has been merged with `at`.
    static constexpr int kNumberOfSkippedBlocksInCallee = 3;

    // We add the `to` block.
    static constexpr int kNumberOfNewBlocksInCaller = 1;
    size_t blocks_added = (reverse_post_order_.size() - kNumberOfSkippedBlocksInCallee)
        + kNumberOfNewBlocksInCaller;

    // Find the location of `at` in the outer graph's reverse post order. The new
    // blocks will be added after it.
    size_t index_of_at = IndexOfElement(outer_graph->reverse_post_order_, at);
    MakeRoomFor(&outer_graph->reverse_post_order_, blocks_added, index_of_at);

    // Do a reverse post order of the blocks in the callee and do (1), (2), (3)
    // and (4) to the blocks that apply.
    for (HBasicBlock* current : GetReversePostOrder()) {
      if (current != exit_block_ && current != entry_block_ && current != first) {
        DCHECK(current->GetGraph() == this);
        current->SetGraph(outer_graph);
        outer_graph->AddBlock(current);
        outer_graph->reverse_post_order_[++index_of_at] = current;
        UpdateLoopAndTryInformationOfNewBlock(current,
                                              at,
                                              /* replace_if_back_edge= */ false,
                                              current->GetTryCatchInformation() != nullptr);
      }
    }

    // Do (1), (2), (3) and (4) to `to`.
    to->SetGraph(outer_graph);
    outer_graph->AddBlock(to);
    outer_graph->reverse_post_order_[++index_of_at] = to;
    // Only `to` can become a back edge, as the inlined blocks
    // are predecessors of `to`.
    UpdateLoopAndTryInformationOfNewBlock(to, at, /* replace_if_back_edge= */ true);

    // Update all predecessors of the exit block (now the `to` block)
    // to not `HReturn` but `HGoto` instead. Special case throwing blocks
    // to now get the outer graph exit block as successor.
    HPhi* return_value_phi = nullptr;
    bool rerun_dominance = false;
    bool rerun_loop_analysis = false;
    for (size_t pred = 0; pred < to->GetPredecessors().size(); ++pred) {
      HBasicBlock* predecessor = to->GetPredecessors()[pred];
      HInstruction* last = predecessor->GetLastInstruction();

      // At this point we might either have:
      // A) Return/ReturnVoid/Throw as the last instruction, or
      // B) `Return/ReturnVoid/Throw->TryBoundary` as the last instruction chain

      const bool saw_try_boundary = last->IsTryBoundary();
      if (saw_try_boundary) {
        DCHECK(predecessor->IsSingleTryBoundary());
        DCHECK(!last->AsTryBoundary()->IsEntry());
        // Look through the TryBoundary to the block that actually returns/throws.
        predecessor = predecessor->GetSinglePredecessor();
        last = predecessor->GetLastInstruction();
      }

      if (last->IsThrow()) {
        if (at->IsTryBlock()) {
          DCHECK(!saw_try_boundary) << "We don't support inlining of try blocks into try blocks.";
          // Create a TryBoundary of kind:exit and point it to the Exit block.
          HBasicBlock* new_block = outer_graph->SplitEdge(predecessor, to);
          new_block->AddInstruction(
              new (allocator) HTryBoundary(HTryBoundary::BoundaryKind::kExit, last->GetDexPc()));
          new_block->ReplaceSuccessor(to, outer_graph->GetExitBlock());

          // Copy information from the predecessor.
          new_block->SetLoopInformation(predecessor->GetLoopInformation());
          TryCatchInformation* try_catch_info = predecessor->GetTryCatchInformation();
          new_block->SetTryCatchInformation(try_catch_info);
          for (HBasicBlock* xhandler :
               try_catch_info->GetTryEntry().GetBlock()->GetExceptionalSuccessors()) {
            new_block->AddSuccessor(xhandler);
          }
          DCHECK(try_catch_info->GetTryEntry().HasSameExceptionHandlersAs(
              *new_block->GetLastInstruction()->AsTryBoundary()));
        } else {
          // We either have `Throw->TryBoundary` or `Throw`. We want to point the whole chain to the
          // exit, so we recompute `predecessor`
          predecessor = to->GetPredecessors()[pred];
          predecessor->ReplaceSuccessor(to, outer_graph->GetExitBlock());
        }

        // A predecessor was just removed from `to`'s predecessor list, so stay
        // on the same index for the next iteration.
        --pred;
        // We need to re-run dominance information, as the exit block now has
        // a new predecessor and potential new dominator.
        // TODO(solanes): See if it's worth it to hand-modify the domination chain instead of
        // rerunning the dominance for the whole graph.
        rerun_dominance = true;
        if (predecessor->GetLoopInformation() != nullptr) {
          // The loop information might have changed e.g. `predecessor` might not be in a loop
          // anymore. We only do this if `predecessor` has loop information as it is impossible for
          // predecessor to end up in a loop if it wasn't in one before.
          rerun_loop_analysis = true;
        }
      } else {
        if (last->IsReturnVoid()) {
          DCHECK(return_value == nullptr);
          DCHECK(return_value_phi == nullptr);
        } else {
          DCHECK(last->IsReturn());
          if (return_value_phi != nullptr) {
            return_value_phi->AddInput(last->InputAt(0));
          } else if (return_value == nullptr) {
            return_value = last->InputAt(0);
          } else {
            // There will be multiple returns.
            return_value_phi = new (allocator) HPhi(
                allocator, kNoRegNumber, 0, HPhi::ToPhiType(invoke->GetType()), to->GetDexPc());
            to->AddPhi(return_value_phi);
            return_value_phi->AddInput(return_value);
            return_value_phi->AddInput(last->InputAt(0));
            return_value = return_value_phi;
          }
        }
        // Replace the Return/ReturnVoid with a Goto to `to`.
        predecessor->AddInstruction(new (allocator) HGoto(last->GetDexPc()));
        predecessor->RemoveInstruction(last);

        if (saw_try_boundary) {
          predecessor = to->GetPredecessors()[pred];
          DCHECK(predecessor->EndsWithTryBoundary());
          DCHECK_EQ(predecessor->GetNormalSuccessors().size(), 1u);
          if (predecessor->GetSuccessors()[0]->GetPredecessors().size() > 1) {
            outer_graph->SplitCriticalEdge(predecessor, to);
            rerun_dominance = true;
            if (predecessor->GetLoopInformation() != nullptr) {
              rerun_loop_analysis = true;
            }
          }
        }
      }
    }
    if (rerun_loop_analysis) {
      outer_graph->RecomputeDominatorTree();
    } else if (rerun_dominance) {
      outer_graph->ClearDominanceInformation();
      outer_graph->ComputeDominanceInformation();
    }
  }

  // Walk over the entry block and:
  // - Move constants from the entry block to the outer_graph's entry block,
  // - Replace HParameterValue instructions with their real value.
  // - Remove suspend checks, that hold an environment.
  // We must do this after the other blocks have been inlined, otherwise ids of
  // constants could overlap with the inner graph.
  size_t parameter_index = 0;
  for (HInstructionIterator it(entry_block_->GetInstructions()); !it.Done(); it.Advance()) {
    HInstruction* current = it.Current();
    HInstruction* replacement = nullptr;
    if (current->IsNullConstant()) {
      replacement = outer_graph->GetNullConstant(current->GetDexPc());
    } else if (current->IsIntConstant()) {
      replacement = outer_graph->GetIntConstant(
          current->AsIntConstant()->GetValue(), current->GetDexPc());
    } else if (current->IsLongConstant()) {
      replacement = outer_graph->GetLongConstant(
          current->AsLongConstant()->GetValue(), current->GetDexPc());
    } else if (current->IsFloatConstant()) {
      replacement = outer_graph->GetFloatConstant(
          current->AsFloatConstant()->GetValue(), current->GetDexPc());
    } else if (current->IsDoubleConstant()) {
      replacement = outer_graph->GetDoubleConstant(
          current->AsDoubleConstant()->GetValue(), current->GetDexPc());
    } else if (current->IsParameterValue()) {
      if (kIsDebugBuild &&
          invoke->IsInvokeStaticOrDirect() &&
          invoke->AsInvokeStaticOrDirect()->IsStaticWithExplicitClinitCheck()) {
        // Ensure we do not use the last input of `invoke`, as it
        // contains a clinit check which is not an actual argument.
        size_t last_input_index = invoke->InputCount() - 1;
        DCHECK(parameter_index != last_input_index);
      }
      replacement = invoke->InputAt(parameter_index++);
    } else if (current->IsCurrentMethod()) {
      replacement = outer_graph->GetCurrentMethod();
    } else {
      // It is OK to ignore MethodEntryHook for inlined functions.
      // In debug mode we don't inline and in release mode method
      // tracing is best effort so OK to ignore them.
      DCHECK(current->IsGoto() || current->IsSuspendCheck() || current->IsMethodEntryHook());
      entry_block_->RemoveInstruction(current);
    }
    if (replacement != nullptr) {
      current->ReplaceWith(replacement);
      // If the current is the return value then we need to update the latter.
      if (current == return_value) {
        DCHECK_EQ(entry_block_, return_value->GetBlock());
        return_value = replacement;
      }
    }
  }

  return return_value;
}
2953
2954 /*
2955 * Loop will be transformed to:
2956 * old_pre_header
2957 * |
2958 * if_block
2959 * / \
2960 * true_block false_block
2961 * \ /
2962 * new_pre_header
2963 * |
2964 * header
2965 */
void HGraph::TransformLoopHeaderForBCE(HBasicBlock* header) {
  // Inserts the diamond pictured above (if_block / true_block / false_block /
  // new_pre_header) between the loop's old pre-header and `header`, wiring up
  // control flow, dominators, reverse post order, and loop/try information.
  DCHECK(header->IsLoopHeader());
  HBasicBlock* old_pre_header = header->GetDominator();

  // Need extra block to avoid critical edge.
  HBasicBlock* if_block = new (allocator_) HBasicBlock(this, header->GetDexPc());
  HBasicBlock* true_block = new (allocator_) HBasicBlock(this, header->GetDexPc());
  HBasicBlock* false_block = new (allocator_) HBasicBlock(this, header->GetDexPc());
  HBasicBlock* new_pre_header = new (allocator_) HBasicBlock(this, header->GetDexPc());
  AddBlock(if_block);
  AddBlock(true_block);
  AddBlock(false_block);
  AddBlock(new_pre_header);

  header->ReplacePredecessor(old_pre_header, new_pre_header);
  old_pre_header->successors_.clear();
  old_pre_header->dominated_blocks_.clear();

  old_pre_header->AddSuccessor(if_block);
  if_block->AddSuccessor(true_block);  // True successor
  if_block->AddSuccessor(false_block);  // False successor
  true_block->AddSuccessor(new_pre_header);
  false_block->AddSuccessor(new_pre_header);

  // Rebuild the dominator chain: old_pre_header -> if_block -> {true_block,
  // false_block, new_pre_header} -> header.
  old_pre_header->dominated_blocks_.push_back(if_block);
  if_block->SetDominator(old_pre_header);
  if_block->dominated_blocks_.push_back(true_block);
  true_block->SetDominator(if_block);
  if_block->dominated_blocks_.push_back(false_block);
  false_block->SetDominator(if_block);
  if_block->dominated_blocks_.push_back(new_pre_header);
  new_pre_header->SetDominator(if_block);
  new_pre_header->dominated_blocks_.push_back(header);
  header->SetDominator(new_pre_header);

  // Fix reverse post order: the four new blocks go right before `header`.
  size_t index_of_header = IndexOfElement(reverse_post_order_, header);
  MakeRoomFor(&reverse_post_order_, 4, index_of_header - 1);
  reverse_post_order_[index_of_header++] = if_block;
  reverse_post_order_[index_of_header++] = true_block;
  reverse_post_order_[index_of_header++] = false_block;
  reverse_post_order_[index_of_header++] = new_pre_header;

  // The pre_header can never be a back edge of a loop.
  DCHECK((old_pre_header->GetLoopInformation() == nullptr) ||
         !old_pre_header->GetLoopInformation()->IsBackEdge(*old_pre_header));
  UpdateLoopAndTryInformationOfNewBlock(
      if_block, old_pre_header, /* replace_if_back_edge= */ false);
  UpdateLoopAndTryInformationOfNewBlock(
      true_block, old_pre_header, /* replace_if_back_edge= */ false);
  UpdateLoopAndTryInformationOfNewBlock(
      false_block, old_pre_header, /* replace_if_back_edge= */ false);
  UpdateLoopAndTryInformationOfNewBlock(
      new_pre_header, old_pre_header, /* replace_if_back_edge= */ false);
}
3021
3022 // Creates a new two-basic-block loop and inserts it between original loop header and
3023 // original loop exit; also adjusts dominators, post order and new LoopInformation.
HBasicBlock* HGraph::TransformLoopForVectorization(HBasicBlock* header,
                                                   HBasicBlock* body,
                                                   HBasicBlock* exit) {
  // Builds a fresh two-block loop (new_header/new_body) between the original
  // loop and `exit`, returning its pre-header. The caller is responsible for
  // adding the loop-control conditional to `new_header`.
  DCHECK(header->IsLoopHeader());
  HLoopInformation* loop = header->GetLoopInformation();

  // Add new loop blocks.
  HBasicBlock* new_pre_header = new (allocator_) HBasicBlock(this, header->GetDexPc());
  HBasicBlock* new_header = new (allocator_) HBasicBlock(this, header->GetDexPc());
  HBasicBlock* new_body = new (allocator_) HBasicBlock(this, header->GetDexPc());
  AddBlock(new_pre_header);
  AddBlock(new_header);
  AddBlock(new_body);

  // Set up control flow.
  header->ReplaceSuccessor(exit, new_pre_header);
  new_pre_header->AddSuccessor(new_header);
  new_header->AddSuccessor(exit);
  new_header->AddSuccessor(new_body);
  new_body->AddSuccessor(new_header);

  // Set up dominators.
  header->ReplaceDominatedBlock(exit, new_pre_header);
  new_pre_header->SetDominator(header);
  new_pre_header->dominated_blocks_.push_back(new_header);
  new_header->SetDominator(new_pre_header);
  new_header->dominated_blocks_.push_back(new_body);
  new_body->SetDominator(new_header);
  new_header->dominated_blocks_.push_back(exit);
  exit->SetDominator(new_header);

  // Fix reverse post order: pre-header and header go right after `header`,
  // the new body right before `body`.
  size_t index_of_header = IndexOfElement(reverse_post_order_, header);
  MakeRoomFor(&reverse_post_order_, 2, index_of_header);
  reverse_post_order_[++index_of_header] = new_pre_header;
  reverse_post_order_[++index_of_header] = new_header;
  size_t index_of_body = IndexOfElement(reverse_post_order_, body);
  MakeRoomFor(&reverse_post_order_, 1, index_of_body - 1);
  reverse_post_order_[index_of_body] = new_body;

  // Add gotos and suspend check (client must add conditional in header).
  new_pre_header->AddInstruction(new (allocator_) HGoto());
  HSuspendCheck* suspend_check = new (allocator_) HSuspendCheck(header->GetDexPc());
  new_header->AddInstruction(suspend_check);
  new_body->AddInstruction(new (allocator_) HGoto());
  DCHECK(loop->GetSuspendCheck() != nullptr);
  suspend_check->CopyEnvironmentFromWithLoopPhiAdjustment(
      loop->GetSuspendCheck()->GetEnvironment(), header);

  // Update loop information.
  new_header->AddBackEdge(new_body);
  new_header->GetLoopInformation()->SetSuspendCheck(suspend_check);
  new_header->GetLoopInformation()->Populate();
  new_pre_header->SetLoopInformation(loop->GetPreHeader()->GetLoopInformation());  // outward
  // Register the new blocks with all loops enclosing the new one (skip the
  // new loop itself, hence the initial Advance()).
  HLoopInformationOutwardIterator it(*new_header);
  for (it.Advance(); !it.Done(); it.Advance()) {
    it.Current()->Add(new_pre_header);
    it.Current()->Add(new_header);
    it.Current()->Add(new_body);
  }
  return new_pre_header;
}
3086
CheckAgainstUpperBound(ReferenceTypeInfo rti,ReferenceTypeInfo upper_bound_rti)3087 static void CheckAgainstUpperBound(ReferenceTypeInfo rti, ReferenceTypeInfo upper_bound_rti)
3088 REQUIRES_SHARED(Locks::mutator_lock_) {
3089 if (rti.IsValid()) {
3090 DCHECK(upper_bound_rti.IsSupertypeOf(rti))
3091 << " upper_bound_rti: " << upper_bound_rti
3092 << " rti: " << rti;
3093 DCHECK_IMPLIES(upper_bound_rti.GetTypeHandle()->CannotBeAssignedFromOtherTypes(), rti.IsExact())
3094 << " upper_bound_rti: " << upper_bound_rti
3095 << " rti: " << rti;
3096 }
3097 }
3098
SetReferenceTypeInfo(ReferenceTypeInfo rti)3099 void HInstruction::SetReferenceTypeInfo(ReferenceTypeInfo rti) {
3100 if (kIsDebugBuild) {
3101 DCHECK_EQ(GetType(), DataType::Type::kReference);
3102 ScopedObjectAccess soa(Thread::Current());
3103 DCHECK(rti.IsValid()) << "Invalid RTI for " << DebugName();
3104 if (IsBoundType()) {
3105 // Having the test here spares us from making the method virtual just for
3106 // the sake of a DCHECK.
3107 CheckAgainstUpperBound(rti, AsBoundType()->GetUpperBound());
3108 }
3109 }
3110 reference_type_handle_ = rti.GetTypeHandle();
3111 SetPackedFlag<kFlagReferenceTypeIsExact>(rti.IsExact());
3112 }
3113
SetReferenceTypeInfoIfValid(ReferenceTypeInfo rti)3114 void HInstruction::SetReferenceTypeInfoIfValid(ReferenceTypeInfo rti) {
3115 if (rti.IsValid()) {
3116 SetReferenceTypeInfo(rti);
3117 }
3118 }
3119
InstructionDataEquals(const HInstruction * other) const3120 bool HBoundType::InstructionDataEquals(const HInstruction* other) const {
3121 const HBoundType* other_bt = other->AsBoundType();
3122 ScopedObjectAccess soa(Thread::Current());
3123 return GetUpperBound().IsEqual(other_bt->GetUpperBound()) &&
3124 GetUpperCanBeNull() == other_bt->GetUpperCanBeNull() &&
3125 CanBeNull() == other_bt->CanBeNull();
3126 }
3127
SetUpperBound(const ReferenceTypeInfo & upper_bound,bool can_be_null)3128 void HBoundType::SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null) {
3129 if (kIsDebugBuild) {
3130 ScopedObjectAccess soa(Thread::Current());
3131 DCHECK(upper_bound.IsValid());
3132 DCHECK(!upper_bound_.IsValid()) << "Upper bound should only be set once.";
3133 CheckAgainstUpperBound(GetReferenceTypeInfo(), upper_bound);
3134 }
3135 upper_bound_ = upper_bound;
3136 SetPackedFlag<kFlagUpperCanBeNull>(can_be_null);
3137 }
3138
Create(TypeHandle type_handle,bool is_exact)3139 ReferenceTypeInfo ReferenceTypeInfo::Create(TypeHandle type_handle, bool is_exact) {
3140 if (kIsDebugBuild) {
3141 ScopedObjectAccess soa(Thread::Current());
3142 DCHECK(IsValidHandle(type_handle));
3143 if (!is_exact) {
3144 DCHECK(!type_handle->CannotBeAssignedFromOtherTypes())
3145 << "Callers of ReferenceTypeInfo::Create should ensure is_exact is properly computed";
3146 }
3147 }
3148 return ReferenceTypeInfo(type_handle, is_exact);
3149 }
3150
operator <<(std::ostream & os,const ReferenceTypeInfo & rhs)3151 std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs) {
3152 ScopedObjectAccess soa(Thread::Current());
3153 os << "["
3154 << " is_valid=" << rhs.IsValid()
3155 << " type=" << (!rhs.IsValid() ? "?" : mirror::Class::PrettyClass(rhs.GetTypeHandle().Get()))
3156 << " is_exact=" << rhs.IsExact()
3157 << " ]";
3158 return os;
3159 }
3160
HasAnyEnvironmentUseBefore(HInstruction * other)3161 bool HInstruction::HasAnyEnvironmentUseBefore(HInstruction* other) {
3162 // For now, assume that instructions in different blocks may use the
3163 // environment.
3164 // TODO: Use the control flow to decide if this is true.
3165 if (GetBlock() != other->GetBlock()) {
3166 return true;
3167 }
3168
3169 // We know that we are in the same block. Walk from 'this' to 'other',
3170 // checking to see if there is any instruction with an environment.
3171 HInstruction* current = this;
3172 for (; current != other && current != nullptr; current = current->GetNext()) {
3173 // This is a conservative check, as the instruction result may not be in
3174 // the referenced environment.
3175 if (current->HasEnvironment()) {
3176 return true;
3177 }
3178 }
3179
3180 // We should have been called with 'this' before 'other' in the block.
3181 // Just confirm this.
3182 DCHECK(current != nullptr);
3183 return false;
3184 }
3185
SetIntrinsic(Intrinsics intrinsic,IntrinsicNeedsEnvironment needs_env,IntrinsicSideEffects side_effects,IntrinsicExceptions exceptions)3186 void HInvoke::SetIntrinsic(Intrinsics intrinsic,
3187 IntrinsicNeedsEnvironment needs_env,
3188 IntrinsicSideEffects side_effects,
3189 IntrinsicExceptions exceptions) {
3190 intrinsic_ = intrinsic;
3191 IntrinsicOptimizations opt(this);
3192
3193 // Adjust method's side effects from intrinsic table.
3194 switch (side_effects) {
3195 case kNoSideEffects: SetSideEffects(SideEffects::None()); break;
3196 case kReadSideEffects: SetSideEffects(SideEffects::AllReads()); break;
3197 case kWriteSideEffects: SetSideEffects(SideEffects::AllWrites()); break;
3198 case kAllSideEffects: SetSideEffects(SideEffects::AllExceptGCDependency()); break;
3199 }
3200
3201 if (needs_env == kNoEnvironment) {
3202 opt.SetDoesNotNeedEnvironment();
3203 } else {
3204 // If we need an environment, that means there will be a call, which can trigger GC.
3205 SetSideEffects(GetSideEffects().Union(SideEffects::CanTriggerGC()));
3206 }
3207 // Adjust method's exception status from intrinsic table.
3208 SetCanThrow(exceptions == kCanThrow);
3209 }
3210
IsStringAlloc() const3211 bool HNewInstance::IsStringAlloc() const {
3212 return GetEntrypoint() == kQuickAllocStringObject;
3213 }
3214
NeedsEnvironment() const3215 bool HInvoke::NeedsEnvironment() const {
3216 if (!IsIntrinsic()) {
3217 return true;
3218 }
3219 IntrinsicOptimizations opt(*this);
3220 return !opt.GetDoesNotNeedEnvironment();
3221 }
3222
GetDexFileForPcRelativeDexCache() const3223 const DexFile& HInvokeStaticOrDirect::GetDexFileForPcRelativeDexCache() const {
3224 ArtMethod* caller = GetEnvironment()->GetMethod();
3225 ScopedObjectAccess soa(Thread::Current());
3226 // `caller` is null for a top-level graph representing a method whose declaring
3227 // class was not resolved.
3228 return caller == nullptr ? GetBlock()->GetGraph()->GetDexFile() : *caller->GetDexFile();
3229 }
3230
// Debug printer for HInvokeStaticOrDirect::ClinitCheckRequirement.
std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs) {
  switch (rhs) {
    case HInvokeStaticOrDirect::ClinitCheckRequirement::kExplicit:
      return os << "explicit";
    case HInvokeStaticOrDirect::ClinitCheckRequirement::kImplicit:
      return os << "implicit";
    case HInvokeStaticOrDirect::ClinitCheckRequirement::kNone:
      return os << "none";
  }
  // All enumerators are handled above; no trailing return after the
  // exhaustive switch (the compiler checks enum coverage).
}
3241
// Whether the invoke's result may be null. Non-reference results and String
// .init calls never are; the boxed-type ValueOf intrinsics (generated below
// from BOXED_TYPES) also never return null.
bool HInvokeStaticOrDirect::CanBeNull() const {
  if (GetType() != DataType::Type::kReference || IsStringInit()) {
    return false;
  }
  switch (GetIntrinsic()) {
    // Expands to `case Intrinsics::k<Type>ValueOf: return false;` for each
    // boxed primitive type.
#define DEFINE_BOXED_CASE(name, unused1, unused2, unused3, unused4) \
    case Intrinsics::k##name##ValueOf: \
      return false;
    BOXED_TYPES(DEFINE_BOXED_CASE)
#undef DEFINE_BOXED_CASE
    default:
      return true;
  }
}
3256
CanDoImplicitNullCheckOn(HInstruction * obj) const3257 bool HInvokeVirtual::CanDoImplicitNullCheckOn(HInstruction* obj) const {
3258 if (obj != InputAt(0)) {
3259 return false;
3260 }
3261 switch (GetIntrinsic()) {
3262 case Intrinsics::kNone:
3263 return true;
3264 case Intrinsics::kReferenceRefersTo:
3265 return true;
3266 default:
3267 // TODO: Add implicit null checks in more intrinsics.
3268 return false;
3269 }
3270 }
3271
// GVN equality for HLoadClass: same type index and packed fields, plus a
// load-kind-dependent check of the underlying class or dex file.
bool HLoadClass::InstructionDataEquals(const HInstruction* other) const {
  const HLoadClass* other_load_class = other->AsLoadClass();
  // TODO: To allow GVN for HLoadClass from different dex files, we should compare the type
  // names rather than type indexes. However, we shall also have to re-think the hash code.
  if (type_index_ != other_load_class->type_index_ ||
      GetPackedFields() != other_load_class->GetPackedFields()) {
    return false;
  }
  switch (GetLoadKind()) {
    case LoadKind::kBootImageRelRo:
    case LoadKind::kJitBootImageAddress:
    case LoadKind::kJitTableAddress: {
      // These kinds compare the cached class objects directly.
      ScopedObjectAccess soa(Thread::Current());
      return GetClass().Get() == other_load_class->GetClass().Get();
    }
    default:
      // Remaining kinds carry a type reference; a matching type index is only
      // meaningful within the same dex file.
      DCHECK(HasTypeReference(GetLoadKind()));
      return IsSameDexFile(GetDexFile(), other_load_class->GetDexFile());
  }
}
3292
// GVN equality for HLoadString: same string index and packed fields, plus a
// load-kind-dependent check of the underlying string or dex file.
bool HLoadString::InstructionDataEquals(const HInstruction* other) const {
  const HLoadString* other_load_string = other->AsLoadString();
  // TODO: To allow GVN for HLoadString from different dex files, we should compare the strings
  // rather than their indexes. However, we shall also have to re-think the hash code.
  if (string_index_ != other_load_string->string_index_ ||
      GetPackedFields() != other_load_string->GetPackedFields()) {
    return false;
  }
  switch (GetLoadKind()) {
    case LoadKind::kBootImageRelRo:
    case LoadKind::kJitBootImageAddress:
    case LoadKind::kJitTableAddress: {
      // These kinds compare the cached string objects directly.
      ScopedObjectAccess soa(Thread::Current());
      return GetString().Get() == other_load_string->GetString().Get();
    }
    default:
      // A matching string index is only meaningful within the same dex file.
      return IsSameDexFile(GetDexFile(), other_load_string->GetDexFile());
  }
}
3312
RemoveEnvironmentUsers()3313 void HInstruction::RemoveEnvironmentUsers() {
3314 for (const HUseListNode<HEnvironment*>& use : GetEnvUses()) {
3315 HEnvironment* user = use.GetUser();
3316 user->SetRawEnvAt(use.GetIndex(), nullptr);
3317 }
3318 env_uses_.clear();
3319 }
3320
ReplaceInstrOrPhiByClone(HInstruction * instr)3321 HInstruction* ReplaceInstrOrPhiByClone(HInstruction* instr) {
3322 HInstruction* clone = instr->Clone(instr->GetBlock()->GetGraph()->GetAllocator());
3323 HBasicBlock* block = instr->GetBlock();
3324
3325 if (instr->IsPhi()) {
3326 HPhi* phi = instr->AsPhi();
3327 DCHECK(!phi->HasEnvironment());
3328 HPhi* phi_clone = clone->AsPhi();
3329 block->ReplaceAndRemovePhiWith(phi, phi_clone);
3330 } else {
3331 block->ReplaceAndRemoveInstructionWith(instr, clone);
3332 if (instr->HasEnvironment()) {
3333 clone->CopyEnvironmentFrom(instr->GetEnvironment());
3334 HLoopInformation* loop_info = block->GetLoopInformation();
3335 if (instr->IsSuspendCheck() && loop_info != nullptr) {
3336 loop_info->SetSuspendCheck(clone->AsSuspendCheck());
3337 }
3338 }
3339 }
3340 return clone;
3341 }
3342
// Returns an instruction with the opposite Boolean value from 'cond'.
HInstruction* HGraph::InsertOppositeCondition(HInstruction* cond, HInstruction* cursor) {
  ArenaAllocator* allocator = GetAllocator();

  // Floating point conditions cannot simply be reversed (NaN comparisons),
  // so they fall through to the HBooleanNot case below.
  if (cond->IsCondition() &&
      !DataType::IsFloatingPointType(cond->InputAt(0)->GetType())) {
    HInstruction* lhs = cond->InputAt(0);
    HInstruction* rhs = cond->InputAt(1);
    HInstruction* replacement = nullptr;
    switch (cond->AsCondition()->GetOppositeCondition()) {  // get *opposite*
      case kCondEQ: replacement = new (allocator) HEqual(lhs, rhs); break;
      case kCondNE: replacement = new (allocator) HNotEqual(lhs, rhs); break;
      case kCondLT: replacement = new (allocator) HLessThan(lhs, rhs); break;
      case kCondLE: replacement = new (allocator) HLessThanOrEqual(lhs, rhs); break;
      case kCondGT: replacement = new (allocator) HGreaterThan(lhs, rhs); break;
      case kCondGE: replacement = new (allocator) HGreaterThanOrEqual(lhs, rhs); break;
      case kCondB: replacement = new (allocator) HBelow(lhs, rhs); break;
      case kCondBE: replacement = new (allocator) HBelowOrEqual(lhs, rhs); break;
      case kCondA: replacement = new (allocator) HAbove(lhs, rhs); break;
      case kCondAE: replacement = new (allocator) HAboveOrEqual(lhs, rhs); break;
      default:
        LOG(FATAL) << "Unexpected condition";
        UNREACHABLE();
    }
    cursor->GetBlock()->InsertInstructionBefore(replacement, cursor);
    return replacement;
  } else if (cond->IsIntConstant()) {
    // Constant condition: just return the opposite constant, no insertion.
    HIntConstant* int_const = cond->AsIntConstant();
    if (int_const->IsFalse()) {
      return GetIntConstant(1);
    } else {
      DCHECK(int_const->IsTrue()) << int_const->GetValue();
      return GetIntConstant(0);
    }
  } else {
    // General case (including floating point conditions): negate with
    // HBooleanNot inserted before the cursor.
    HInstruction* replacement = new (allocator) HBooleanNot(cond);
    cursor->GetBlock()->InsertInstructionBefore(replacement, cursor);
    return replacement;
  }
}
3384
operator <<(std::ostream & os,const MoveOperands & rhs)3385 std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs) {
3386 os << "["
3387 << " source=" << rhs.GetSource()
3388 << " destination=" << rhs.GetDestination()
3389 << " type=" << rhs.GetType()
3390 << " instruction=";
3391 if (rhs.GetInstruction() != nullptr) {
3392 os << rhs.GetInstruction()->DebugName() << ' ' << rhs.GetInstruction()->GetId();
3393 } else {
3394 os << "null";
3395 }
3396 os << " ]";
3397 return os;
3398 }
3399
// Debug printer for TypeCheckKind.
std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs) {
  switch (rhs) {
    case TypeCheckKind::kUnresolvedCheck:
      return os << "unresolved_check";
    case TypeCheckKind::kExactCheck:
      return os << "exact_check";
    case TypeCheckKind::kClassHierarchyCheck:
      return os << "class_hierarchy_check";
    case TypeCheckKind::kAbstractClassCheck:
      return os << "abstract_class_check";
    case TypeCheckKind::kInterfaceCheck:
      return os << "interface_check";
    case TypeCheckKind::kArrayObjectCheck:
      return os << "array_object_check";
    case TypeCheckKind::kArrayCheck:
      return os << "array_check";
    case TypeCheckKind::kBitstringCheck:
      return os << "bitstring_check";
  }
  // All enumerators are handled above; no trailing return after the
  // exhaustive switch (the compiler checks enum coverage).
}
3420
// Check that intrinsic enum values fit within space set aside in ArtMethod modifier flags.
// Fixed typo in the static_assert diagnostic ("Instrinsics" -> "Intrinsics").
#define CHECK_INTRINSICS_ENUM_VALUES(Name, InvokeType, _, SideEffects, Exceptions, ...) \
  static_assert( \
      static_cast<uint32_t>(Intrinsics::k ## Name) <= (kAccIntrinsicBits >> CTZ(kAccIntrinsicBits)), \
      "Intrinsics enumeration space overflow.");
ART_INTRINSICS_LIST(CHECK_INTRINSICS_ENUM_VALUES)
#undef CHECK_INTRINSICS_ENUM_VALUES
3428
// Function that returns whether an intrinsic needs an environment or not.
// The per-intrinsic answer comes from the NeedsEnv column of ART_INTRINSICS_LIST.
static inline IntrinsicNeedsEnvironment NeedsEnvironmentIntrinsic(Intrinsics i) {
  switch (i) {
    case Intrinsics::kNone:
      return kNeedsEnvironment;  // Non-sensical for intrinsic.
#define OPTIMIZING_INTRINSICS(Name, InvokeType, NeedsEnv, SideEffects, Exceptions, ...) \
    case Intrinsics::k ## Name: \
      return NeedsEnv;
    ART_INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef OPTIMIZING_INTRINSICS
  }
  // Unreachable for valid enum values; conservative default.
  return kNeedsEnvironment;
}
3442
// Function that returns whether an intrinsic has side effects.
// The per-intrinsic answer comes from the SideEffects column of ART_INTRINSICS_LIST.
static inline IntrinsicSideEffects GetSideEffectsIntrinsic(Intrinsics i) {
  switch (i) {
    case Intrinsics::kNone:
      return kAllSideEffects;  // Conservative for a non-intrinsic.
#define OPTIMIZING_INTRINSICS(Name, InvokeType, NeedsEnv, SideEffects, Exceptions, ...) \
    case Intrinsics::k ## Name: \
      return SideEffects;
    ART_INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef OPTIMIZING_INTRINSICS
  }
  // Unreachable for valid enum values; conservative default.
  return kAllSideEffects;
}
3456
// Function that returns whether an intrinsic can throw exceptions.
// The per-intrinsic answer comes from the Exceptions column of ART_INTRINSICS_LIST.
static inline IntrinsicExceptions GetExceptionsIntrinsic(Intrinsics i) {
  switch (i) {
    case Intrinsics::kNone:
      return kCanThrow;  // Conservative for a non-intrinsic.
#define OPTIMIZING_INTRINSICS(Name, InvokeType, NeedsEnv, SideEffects, Exceptions, ...) \
    case Intrinsics::k ## Name: \
      return Exceptions;
    ART_INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef OPTIMIZING_INTRINSICS
  }
  // Unreachable for valid enum values; conservative default.
  return kCanThrow;
}
3470
SetResolvedMethod(ArtMethod * method,bool enable_intrinsic_opt)3471 void HInvoke::SetResolvedMethod(ArtMethod* method, bool enable_intrinsic_opt) {
3472 if (method != nullptr && method->IsIntrinsic() && enable_intrinsic_opt) {
3473 Intrinsics intrinsic = static_cast<Intrinsics>(method->GetIntrinsic());
3474 SetIntrinsic(intrinsic,
3475 NeedsEnvironmentIntrinsic(intrinsic),
3476 GetSideEffectsIntrinsic(intrinsic),
3477 GetExceptionsIntrinsic(intrinsic));
3478 }
3479 resolved_method_ = method;
3480 }
3481
IsGEZero(HInstruction * instruction)3482 bool IsGEZero(HInstruction* instruction) {
3483 DCHECK(instruction != nullptr);
3484 if (instruction->IsArrayLength()) {
3485 return true;
3486 } else if (instruction->IsMin()) {
3487 // Instruction MIN(>=0, >=0) is >= 0.
3488 return IsGEZero(instruction->InputAt(0)) &&
3489 IsGEZero(instruction->InputAt(1));
3490 } else if (instruction->IsAbs()) {
3491 // Instruction ABS(>=0) is >= 0.
3492 // NOTE: ABS(minint) = minint prevents assuming
3493 // >= 0 without looking at the argument.
3494 return IsGEZero(instruction->InputAt(0));
3495 }
3496 int64_t value = -1;
3497 return IsInt64AndGet(instruction, &value) && value >= 0;
3498 }
3499
3500 } // namespace art
3501