/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "dead_code_elimination.h"

#include "android-base/logging.h"
#include "base/array_ref.h"
#include "base/bit_vector-inl.h"
#include "base/logging.h"
#include "base/scoped_arena_allocator.h"
#include "base/scoped_arena_containers.h"
#include "base/stl_util.h"
#include "optimizing/nodes.h"
#include "optimizing/nodes_vector.h"
#include "ssa_phi_elimination.h"

namespace art HIDDEN {

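// Marks, in `visited`, every block reachable from the entry block via a depth-first search,
// skipping successors that can never be taken because an If condition or a PackedSwitch input
// is a known constant.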
static void MarkReachableBlocks(HGraph* graph, ArenaBitVector* visited) {
  // Use local allocator for allocating memory.
  ScopedArenaAllocator allocator(graph->GetArenaStack());

  ScopedArenaVector<HBasicBlock*> worklist(allocator.Adapter(kArenaAllocDCE));
  constexpr size_t kDefaultWorklistSize = 8;
  worklist.reserve(kDefaultWorklistSize);
  visited->SetBit(graph->GetEntryBlock()->GetBlockId());
  worklist.push_back(graph->GetEntryBlock());

  while (!worklist.empty()) {
    HBasicBlock* block = worklist.back();
    worklist.pop_back();
    int block_id = block->GetBlockId();
    DCHECK(visited->IsBitSet(block_id));

    ArrayRef<HBasicBlock* const> live_successors(block->GetSuccessors());
    HInstruction* last_instruction = block->GetLastInstruction();
    if (last_instruction->IsIf()) {
      HIf* if_instruction = last_instruction->AsIf();
      HInstruction* condition = if_instruction->InputAt(0);
      if (condition->IsIntConstant()) {
        if (condition->AsIntConstant()->IsTrue()) {
          live_successors = live_successors.SubArray(0u, 1u);
          DCHECK_EQ(live_successors[0], if_instruction->IfTrueSuccessor());
        } else {
          DCHECK(condition->AsIntConstant()->IsFalse()) << condition->AsIntConstant()->GetValue();
          live_successors = live_successors.SubArray(1u, 1u);
          DCHECK_EQ(live_successors[0], if_instruction->IfFalseSuccessor());
        }
      }
    } else if (last_instruction->IsPackedSwitch()) {
      HPackedSwitch* switch_instruction = last_instruction->AsPackedSwitch();
      HInstruction* switch_input = switch_instruction->InputAt(0);
      if (switch_input->IsIntConstant()) {
        int32_t switch_value = switch_input->AsIntConstant()->GetValue();
        int32_t start_value = switch_instruction->GetStartValue();
        // Note: Though the spec forbids packed-switch values to wrap around, we leave
        // that task to the verifier and use unsigned arithmetic with its "modulo 2^32"
        // semantics to check if the value is in range, wrapped or not.
        uint32_t switch_index =
            static_cast<uint32_t>(switch_value) - static_cast<uint32_t>(start_value);
        if (switch_index < switch_instruction->GetNumEntries()) {
          live_successors = live_successors.SubArray(switch_index, 1u);
          DCHECK_EQ(live_successors[0], block->GetSuccessors()[switch_index]);
        } else {
          live_successors = live_successors.SubArray(switch_instruction->GetNumEntries(), 1u);
          DCHECK_EQ(live_successors[0], switch_instruction->GetDefaultBlock());
        }
      }
    }

    for (HBasicBlock* successor : live_successors) {
      // Add only those successors that have not been visited yet.
      if (!visited->IsBitSet(successor->GetBlockId())) {
        visited->SetBit(successor->GetBlockId());
        worklist.push_back(successor);
      }
    }
  }
}

void HDeadCodeElimination::MaybeRecordDeadBlock(HBasicBlock* block) {
  if (stats_ != nullptr) {
    stats_->RecordStat(MethodCompilationStat::kRemovedDeadInstruction,
                       block->GetPhis().CountSize() + block->GetInstructions().CountSize());
  }
}

void HDeadCodeElimination::MaybeRecordSimplifyIf() {
  if (stats_ != nullptr) {
    stats_->RecordStat(MethodCompilationStat::kSimplifyIf);
  }
}

static bool HasInput(HCondition* instruction, HInstruction* input) {
  return (instruction->InputAt(0) == input) ||
         (instruction->InputAt(1) == input);
}

static bool HasEquality(IfCondition condition) {
  switch (condition) {
    case kCondEQ:
    case kCondLE:
    case kCondGE:
    case kCondBE:
    case kCondAE:
      return true;
    case kCondNE:
    case kCondLT:
    case kCondGT:
    case kCondB:
    case kCondA:
      return false;
  }
}

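// Returns the constant result of `condition` applied to `left` and `right` when it can be
// statically evaluated (identical non-floating-point operands, or two constant operands),
// or nullptr otherwise.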
static HConstant* Evaluate(HCondition* condition, HInstruction* left, HInstruction* right) {
  if (left == right && !DataType::IsFloatingPointType(left->GetType())) {
    return condition->GetBlock()->GetGraph()->GetIntConstant(
        HasEquality(condition->GetCondition()) ? 1 : 0);
  }

  if (!left->IsConstant() || !right->IsConstant()) {
    return nullptr;
  }

  if (left->IsIntConstant()) {
    return condition->Evaluate(left->AsIntConstant(), right->AsIntConstant());
  } else if (left->IsNullConstant()) {
    return condition->Evaluate(left->AsNullConstant(), right->AsNullConstant());
  } else if (left->IsLongConstant()) {
    return condition->Evaluate(left->AsLongConstant(), right->AsLongConstant());
  } else if (left->IsFloatConstant()) {
    return condition->Evaluate(left->AsFloatConstant(), right->AsFloatConstant());
  } else {
    DCHECK(left->IsDoubleConstant());
    return condition->Evaluate(left->AsDoubleConstant(), right->AsDoubleConstant());
  }
}

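// Given a `block` ending with `if (obj == null)` (or `if (obj != null)`) whose `throws`
// successor always throws, replaces null checks on `obj` in blocks dominated by the non-throwing
// successor with a non-null HBoundType, since `obj` cannot be null on that path. Returns true if
// at least one null check was removed.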
static bool RemoveNonNullControlDependences(HBasicBlock* block, HBasicBlock* throws) {
  // Test for an if as last statement.
  if (!block->EndsWithIf()) {
    return false;
  }
  HIf* ifs = block->GetLastInstruction()->AsIf();
  // Find either:
  //   if obj == null
  //     throws
  //   else
  //     not_throws
  // or:
  //   if obj != null
  //     not_throws
  //   else
  //     throws
  HInstruction* cond = ifs->InputAt(0);
  HBasicBlock* not_throws = nullptr;
  if (throws == ifs->IfTrueSuccessor() && cond->IsEqual()) {
    not_throws = ifs->IfFalseSuccessor();
  } else if (throws == ifs->IfFalseSuccessor() && cond->IsNotEqual()) {
    not_throws = ifs->IfTrueSuccessor();
  } else {
    return false;
  }
  DCHECK(cond->IsEqual() || cond->IsNotEqual());
  HInstruction* obj = cond->InputAt(1);
  if (obj->IsNullConstant()) {
    obj = cond->InputAt(0);
  } else if (!cond->InputAt(0)->IsNullConstant()) {
    return false;
  }

  // We can't create a BoundType for an object with an invalid RTI.
  const ReferenceTypeInfo ti = obj->GetReferenceTypeInfo();
  if (!ti.IsValid()) {
    return false;
  }

  // Scan all uses of obj and find null check under control dependence.
  HBoundType* bound = nullptr;
  const HUseList<HInstruction*>& uses = obj->GetUses();
  for (auto it = uses.begin(), end = uses.end(); it != end;) {
    HInstruction* user = it->GetUser();
    ++it;  // increment before possibly replacing
    if (user->IsNullCheck()) {
      HBasicBlock* user_block = user->GetBlock();
      if (user_block != block &&
          user_block != throws &&
          block->Dominates(user_block)) {
        if (bound == nullptr) {
          bound = new (obj->GetBlock()->GetGraph()->GetAllocator()) HBoundType(obj);
          bound->SetUpperBound(ti, /*can_be_null*/ false);
          bound->SetReferenceTypeInfo(ti);
          bound->SetCanBeNull(false);
          not_throws->InsertInstructionBefore(bound, not_throws->GetFirstInstruction());
        }
        user->ReplaceWith(bound);
        user_block->RemoveInstruction(user);
      }
    }
  }
  return bound != nullptr;
}

// Simplify the pattern:
//
//           B1
//          /  \
//         |   instr_1
//         |   ...
//         |   instr_n
//         |   foo()  // always throws
//         |   instr_n+2
//         |   ...
//         |   instr_n+m
//          \   goto B2
//           \ /
//            B2
//
// Into:
//
//           B1
//          /  \
//         |   instr_1
//         |   ...
//         |   instr_n
//         |   foo()
//         |   goto Exit
//         |    |
//         B2  Exit
//
// Rationale:
// Removal of the never taken edge to B2 may expose other optimization opportunities, such as code
// sinking.
//
// Note: The example above is a simple one that uses a `goto` but we could end the block with an
// If, for example.
bool HDeadCodeElimination::SimplifyAlwaysThrows() {
  HBasicBlock* exit = graph_->GetExitBlock();
  if (!graph_->HasAlwaysThrowingInvokes() || exit == nullptr) {
    return false;
  }

  bool rerun_dominance_and_loop_analysis = false;

  // Order does not matter, just pick one.
  for (HBasicBlock* block : graph_->GetReversePostOrder()) {
    if (block->IsTryBlock()) {
      // We don't want to perform the simplify-always-throws optimization for throws inside of
      // tries since those throws might not go to the exit block.
      continue;
    }

    // We iterate to find the first instruction that always throws. If two instructions always
    // throw, the first one will throw and the second one will never be reached.
    HInstruction* throwing_invoke = nullptr;
    for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
      if (it.Current()->IsInvoke() && it.Current()->AsInvoke()->AlwaysThrows()) {
        throwing_invoke = it.Current();
        break;
      }
    }

    if (throwing_invoke == nullptr) {
      // No always-throwing instruction found. Continue with the rest of the blocks.
      continue;
    }

    // If we are already pointing at the exit block we could still remove the instructions
    // between the always throwing instruction and the exit block. If we have no other
    // instructions, just continue since there's nothing to do.
    if (block->GetSuccessors().size() == 1 &&
        block->GetSingleSuccessor() == exit &&
        block->GetLastInstruction()->GetPrevious() == throwing_invoke) {
      continue;
    }

    // We split the block at the throwing instruction, and the instructions after the throwing
    // instruction will be disconnected from the graph after `block` points to the exit.
    // `RemoveDeadBlocks` will take care of removing this new block and its instructions.
    // Even though `SplitBefore` doesn't guarantee the graph to remain in SSA form, it is fine
    // since we do not break it.
    HBasicBlock* new_block = block->SplitBefore(throwing_invoke->GetNext(),
                                                /* require_graph_not_in_ssa_form= */ false);
    DCHECK_EQ(block->GetSingleSuccessor(), new_block);
    block->ReplaceSuccessor(new_block, exit);

    rerun_dominance_and_loop_analysis = true;
    MaybeRecordStat(stats_, MethodCompilationStat::kSimplifyThrowingInvoke);
    // Perform a quick follow up optimization on object != null control dependences
    // that is much cheaper to perform now than in a later phase.
    // If there are multiple predecessors, none may end with an HIf as required in
    // RemoveNonNullControlDependences because we split critical edges.
    if (block->GetPredecessors().size() == 1u &&
        RemoveNonNullControlDependences(block->GetSinglePredecessor(), block)) {
      MaybeRecordStat(stats_, MethodCompilationStat::kRemovedNullCheck);
    }
  }

  // We need to re-analyze the graph in order to run DCE afterwards.
  if (rerun_dominance_and_loop_analysis) {
    graph_->RecomputeDominatorTree();
    return true;
  }
  return false;
}

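// Redirects predecessors of a block ending in an HIf directly to the matching If successor when
// the branch outcome is statically known for that predecessor, e.g. because the If condition is
// fed a constant through a Phi. Returns true if at least one If was simplified.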
bool HDeadCodeElimination::SimplifyIfs() {
  bool simplified_one_or_more_ifs = false;
  bool rerun_dominance_and_loop_analysis = false;

  // Iterating in PostOrder is better for MaybeAddPhi as it can add a Phi for multiple If
  // instructions in a chain without updating the dominator chain. The branch redirection itself
  // can work in PostOrder or ReversePostOrder without issues.
  for (HBasicBlock* block : graph_->GetPostOrder()) {
    if (block->IsCatchBlock()) {
      // This simplification cannot be applied to catch blocks, because exception handler edges do
      // not represent normal control flow. Though in theory this could still apply to normal
      // control flow going directly to a catch block, we cannot support it at the moment because
      // the catch Phi's inputs do not correspond to the catch block's predecessors, so we cannot
      // identify which predecessor corresponds to a given statically evaluated input.
      continue;
    }

    HInstruction* last = block->GetLastInstruction();
    if (!last->IsIf()) {
      continue;
    }

    if (block->IsLoopHeader()) {
      // We do not apply this optimization to loop headers as this could create irreducible loops.
      continue;
    }

    // We will add a Phi which allows the simplification to take place in cases where it wouldn't.
    MaybeAddPhi(block);

    // TODO(solanes): Investigate support for multiple phis in `block`. We can potentially "push
    // downwards" existing Phis into the true/false branches. For example, let's say we have
    // another Phi: Phi(x1,x2,x3,x4,x5,x6). This could turn into Phi(x1,x2) in the true branch,
    // Phi(x3,x4) in the false branch, and remain as Phi(x5,x6) in `block` (for edges that we
    // couldn't redirect). We might even be able to remove some phis altogether as they will have
    // only one value.
    if (block->HasSinglePhi() &&
        block->GetFirstPhi()->HasOnlyOneNonEnvironmentUse()) {
      HInstruction* first = block->GetFirstInstruction();
      bool has_only_phi_and_if = (last == first) && (last->InputAt(0) == block->GetFirstPhi());
      bool has_only_phi_condition_and_if =
          !has_only_phi_and_if &&
          first->IsCondition() &&
          HasInput(first->AsCondition(), block->GetFirstPhi()) &&
          (first->GetNext() == last) &&
          (last->InputAt(0) == first) &&
          first->HasOnlyOneNonEnvironmentUse();

      if (has_only_phi_and_if || has_only_phi_condition_and_if) {
        HPhi* phi = block->GetFirstPhi()->AsPhi();
        bool phi_input_is_left = (first->InputAt(0) == phi);

        // Walk over all inputs of the phis and update the control flow of
        // predecessors feeding constants to the phi.
        // Note that phi->InputCount() may change inside the loop.
        for (size_t i = 0; i < phi->InputCount();) {
          HInstruction* input = phi->InputAt(i);
          HInstruction* value_to_check = nullptr;
          if (has_only_phi_and_if) {
            if (input->IsIntConstant()) {
              value_to_check = input;
            }
          } else {
            DCHECK(has_only_phi_condition_and_if);
            if (phi_input_is_left) {
              value_to_check = Evaluate(first->AsCondition(), input, first->InputAt(1));
            } else {
              value_to_check = Evaluate(first->AsCondition(), first->InputAt(0), input);
            }
          }
          if (value_to_check == nullptr) {
            // Could not evaluate to a constant, continue iterating over the inputs.
            ++i;
          } else {
            HBasicBlock* predecessor_to_update = block->GetPredecessors()[i];
            HBasicBlock* successor_to_update = nullptr;
            if (value_to_check->AsIntConstant()->IsTrue()) {
              successor_to_update = last->AsIf()->IfTrueSuccessor();
            } else {
              DCHECK(value_to_check->AsIntConstant()->IsFalse())
                  << value_to_check->AsIntConstant()->GetValue();
              successor_to_update = last->AsIf()->IfFalseSuccessor();
            }
            predecessor_to_update->ReplaceSuccessor(block, successor_to_update);
            phi->RemoveInputAt(i);
            simplified_one_or_more_ifs = true;
            if (block->IsInLoop()) {
              rerun_dominance_and_loop_analysis = true;
            }
            // For simplicity, don't create a dead block, let the dead code elimination
            // pass deal with it.
            if (phi->InputCount() == 1) {
              break;
            }
          }
        }
        if (block->GetPredecessors().size() == 1) {
          phi->ReplaceWith(phi->InputAt(0));
          block->RemovePhi(phi);
          if (has_only_phi_condition_and_if) {
            // Evaluate here (and not wait for a constant folding pass) to open
            // more opportunities for DCE.
            HInstruction* result = first->AsCondition()->TryStaticEvaluation();
            if (result != nullptr) {
              first->ReplaceWith(result);
              block->RemoveInstruction(first);
            }
          }
        }
        if (simplified_one_or_more_ifs) {
          MaybeRecordSimplifyIf();
        }
      }
    }
  }
  // We need to re-analyze the graph in order to run DCE afterwards.
  if (simplified_one_or_more_ifs) {
    if (rerun_dominance_and_loop_analysis) {
      graph_->RecomputeDominatorTree();
    } else {
      graph_->ClearDominanceInformation();
      // We have introduced critical edges, remove them.
      graph_->SimplifyCFG();
      graph_->ComputeDominanceInformation();
      graph_->ComputeTryBlockInformation();
    }
  }

  return simplified_one_or_more_ifs;
}

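// If `block` ends in an If whose condition is the same (or the opposite) condition as the one in
// its dominator's If, adds an int Phi to `block` that encodes, per predecessor, which way the
// dominator branched. SimplifyIfs can then redirect those predecessors directly to the matching
// successor of `block`.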
void HDeadCodeElimination::MaybeAddPhi(HBasicBlock* block) {
  DCHECK(block->GetLastInstruction()->IsIf());
  HIf* if_instruction = block->GetLastInstruction()->AsIf();
  if (if_instruction->InputAt(0)->IsConstant()) {
    // Constant values are handled in RemoveDeadBlocks.
    return;
  }

  if (block->GetNumberOfPredecessors() < 2u) {
    // Nothing to redirect.
    return;
  }

  if (!block->GetPhis().IsEmpty()) {
    // SimplifyIf doesn't currently work with multiple phis. Adding a phi here won't help that
    // optimization.
    return;
  }

  HBasicBlock* dominator = block->GetDominator();
  if (!dominator->EndsWithIf()) {
    return;
  }

  HInstruction* input = if_instruction->InputAt(0);
  HInstruction* dominator_input = dominator->GetLastInstruction()->AsIf()->InputAt(0);
  const bool same_input = dominator_input == input;
  if (!same_input) {
    // Try to see if the dominator has the opposite input (e.g. if(cond) and if(!cond)). If that's
    // the case, we can perform the optimization with the false and true branches reversed.
    if (!dominator_input->IsCondition() || !input->IsCondition()) {
      return;
    }

    HCondition* block_cond = input->AsCondition();
    HCondition* dominator_cond = dominator_input->AsCondition();

    if (block_cond->GetLeft() != dominator_cond->GetLeft() ||
        block_cond->GetRight() != dominator_cond->GetRight() ||
        block_cond->GetOppositeCondition() != dominator_cond->GetCondition()) {
      return;
    }
  }

  if (kIsDebugBuild) {
    // `block`'s successors should have only one predecessor. Otherwise, we have a critical edge in
    // the graph.
    for (HBasicBlock* succ : block->GetSuccessors()) {
      DCHECK_EQ(succ->GetNumberOfPredecessors(), 1u);
    }
  }

  const size_t pred_size = block->GetNumberOfPredecessors();
  HPhi* new_phi = new (graph_->GetAllocator())
      HPhi(graph_->GetAllocator(), kNoRegNumber, pred_size, DataType::Type::kInt32);

  for (size_t index = 0; index < pred_size; index++) {
    HBasicBlock* pred = block->GetPredecessors()[index];
    const bool dominated_by_true =
        dominator->GetLastInstruction()->AsIf()->IfTrueSuccessor()->Dominates(pred);
    const bool dominated_by_false =
        dominator->GetLastInstruction()->AsIf()->IfFalseSuccessor()->Dominates(pred);
    if (dominated_by_true == dominated_by_false) {
      // In this case, we can't know if we are coming from the true branch, or the false branch. It
      // happens in cases like:
      //       1 (outer if)
      //      / \
      //     2   3 (inner if)
      //     |  / \
      //     | 4   5
      //      \/   |
      //      6    |
      //       \   |
      //         7 (has the same if(cond) as 1)
      //         |
      //         8
      // `7` (which would be `block` in this example), and `6` will come from both the true path
      // and the false path of `1`. We bumped into something similar in SelectGenerator. See
      // HSelectGenerator::TryFixupDoubleDiamondPattern.
      // TODO(solanes): Figure out if we can fix up the graph into a double diamond in a generic
      // way so that DeadCodeElimination and SelectGenerator can take advantage of it.

      if (!same_input) {
        // `1` and `7` having the opposite condition is a case we are missing. We could potentially
        // add a BooleanNot instruction to be able to add the Phi, but it seems like overkill since
        // this case is not that common.
        return;
      }

      // The Phi will have `0`, `1`, and `cond` as inputs. If SimplifyIf redirects 0s and 1s, we
      // will end up with Phi(cond,...,cond) which will be replaced by `cond`. Effectively, we will
      // redirect edges that we are able to redirect and the rest will remain as before (i.e. we
      // won't have an extra Phi).
      new_phi->SetRawInputAt(index, input);
    } else {
      // Redirect to either the true branch (1), or the false branch (0).
      // Given that `dominated_by_true` is the exact opposite of `dominated_by_false`,
      // `(same_input && dominated_by_true) || (!same_input && dominated_by_false)` is equivalent
      // to `same_input == dominated_by_true`.
      new_phi->SetRawInputAt(
          index,
          same_input == dominated_by_true ? graph_->GetIntConstant(1) : graph_->GetIntConstant(0));
    }
  }

  block->AddPhi(new_phi);
  if_instruction->ReplaceInput(new_phi, 0);

  // Remove the old input now, if possible. This allows the branch redirection in SimplifyIf to
  // work without waiting for another pass of DCE.
  if (input->IsDeadAndRemovable()) {
    DCHECK(!same_input)
        << " if both blocks have the same condition, it shouldn't be dead and removable since the "
        << "dominator block's If instruction would be using that condition.";
    input->GetBlock()->RemoveInstruction(input);
  }
  MaybeRecordStat(stats_, MethodCompilationStat::kSimplifyIfAddedPhi);
}

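// Merges each block ending in a Goto with its single successor when that successor is not the
// exit block and has no other predecessors, collapsing trivial chains of blocks.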
void HDeadCodeElimination::ConnectSuccessiveBlocks() {
  // Order does not matter. Skip the entry block by starting at index 1 in reverse post order.
  for (size_t i = 1u, size = graph_->GetReversePostOrder().size(); i != size; ++i) {
    HBasicBlock* block = graph_->GetReversePostOrder()[i];
    DCHECK(!block->IsEntryBlock());
    while (block->GetLastInstruction()->IsGoto()) {
      HBasicBlock* successor = block->GetSingleSuccessor();
      if (successor->IsExitBlock() || successor->GetPredecessors().size() != 1u) {
        break;
      }
      DCHECK_LT(i, IndexOfElement(graph_->GetReversePostOrder(), successor));
      block->MergeWith(successor);
      --size;
      DCHECK_EQ(size, graph_->GetReversePostOrder().size());
      DCHECK_EQ(block, graph_->GetReversePostOrder()[i]);
      // Reiterate on this block in case it can be merged with its new successor.
    }
  }
}

struct HDeadCodeElimination::TryBelongingInformation {
  TryBelongingInformation(HGraph* graph, ScopedArenaAllocator* allocator)
      : blocks_in_try(allocator, graph->GetBlocks().size(), /*expandable=*/false, kArenaAllocDCE),
        coalesced_try_entries(
            allocator, graph->GetBlocks().size(), /*expandable=*/false, kArenaAllocDCE) {}

  // Which blocks belong in the try.
  ArenaBitVector blocks_in_try;
  // Which other try entries are referencing this same try.
  ArenaBitVector coalesced_try_entries;
};

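// Returns true if none of the blocks inside the try contains an instruction that can throw, in
// which case no exception can reach the catch handlers and the try can be removed.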
bool HDeadCodeElimination::CanPerformTryRemoval(
    const TryBelongingInformation& try_belonging_info) {
  const ArenaVector<HBasicBlock*>& blocks = graph_->GetBlocks();
  for (uint32_t i : try_belonging_info.blocks_in_try.Indexes()) {
    for (HInstructionIterator it(blocks[i]->GetInstructions()); !it.Done(); it.Advance()) {
      if (it.Current()->CanThrow()) {
        return false;
      }
    }
  }
  return true;
}

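// Removes the catch handler successors of a block ending in a TryBoundary and replaces the
// TryBoundary with a Goto, leaving only the normal-flow successor. Sets `*any_block_in_loop` if
// the block or any of the disconnected handlers is inside a loop.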
void HDeadCodeElimination::DisconnectHandlersAndUpdateTryBoundary(
    HBasicBlock* block,
    /* out */ bool* any_block_in_loop) {
  if (block->IsInLoop()) {
    *any_block_in_loop = true;
  }

  // Disconnect the handlers.
  while (block->GetSuccessors().size() > 1) {
    HBasicBlock* handler = block->GetSuccessors()[1];
    DCHECK(handler->IsCatchBlock());
    block->RemoveSuccessor(handler);
    handler->RemovePredecessor(block);
    if (handler->IsInLoop()) {
      *any_block_in_loop = true;
    }
  }

  // Change TryBoundary to Goto.
  DCHECK(block->EndsWithTryBoundary());
  HInstruction* last = block->GetLastInstruction();
  block->RemoveInstruction(last);
  block->AddInstruction(new (graph_->GetAllocator()) HGoto(last->GetDexPc()));
  DCHECK_EQ(block->GetSuccessors().size(), 1u);
}

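// Removes the try represented by `try_entry` (plus any coalesced try entries): disconnects the
// catch handlers from every entry and exit TryBoundary, turns those TryBoundary instructions into
// Gotos, and clears the try/catch information of the blocks that used to be inside the try.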
void HDeadCodeElimination::RemoveTry(HBasicBlock* try_entry,
                                     const TryBelongingInformation& try_belonging_info,
                                     /* out */ bool* any_block_in_loop) {
  // Update all try entries.
  DCHECK(try_entry->EndsWithTryBoundary());
  DCHECK(try_entry->GetLastInstruction()->AsTryBoundary()->IsEntry());
  DisconnectHandlersAndUpdateTryBoundary(try_entry, any_block_in_loop);

  const ArenaVector<HBasicBlock*>& blocks = graph_->GetBlocks();
  for (uint32_t i : try_belonging_info.coalesced_try_entries.Indexes()) {
    HBasicBlock* other_try_entry = blocks[i];
    DCHECK(other_try_entry->EndsWithTryBoundary());
    DCHECK(other_try_entry->GetLastInstruction()->AsTryBoundary()->IsEntry());
    DisconnectHandlersAndUpdateTryBoundary(other_try_entry, any_block_in_loop);
  }

  // Update the blocks in the try.
  for (uint32_t i : try_belonging_info.blocks_in_try.Indexes()) {
    HBasicBlock* block = blocks[i];
    // Update the try catch information since now the try doesn't exist.
    block->SetTryCatchInformation(nullptr);
    if (block->IsInLoop()) {
      *any_block_in_loop = true;
    }

    if (block->EndsWithTryBoundary()) {
      // Try exits.
      DCHECK(!block->GetLastInstruction()->AsTryBoundary()->IsEntry());
      DisconnectHandlersAndUpdateTryBoundary(block, any_block_in_loop);

      if (block->GetSingleSuccessor()->IsExitBlock()) {
        // `block` used to be a single exit TryBoundary that got turned into a Goto. It
        // is now pointing to the exit which we don't allow. To fix it, we disconnect
        // `block` from its predecessor and RemoveDeadBlocks will remove it from the
        // graph.
        DCHECK(block->IsSingleGoto());
        HBasicBlock* predecessor = block->GetSinglePredecessor();
        predecessor->ReplaceSuccessor(block, graph_->GetExitBlock());

        if (!block->GetDominatedBlocks().empty()) {
          // Update domination tree if `block` dominates a block to keep the graph consistent.
          DCHECK_EQ(block->GetDominatedBlocks().size(), 1u);
          DCHECK_EQ(graph_->GetExitBlock()->GetDominator(), block);
          predecessor->AddDominatedBlock(graph_->GetExitBlock());
          graph_->GetExitBlock()->SetDominator(predecessor);
          block->RemoveDominatedBlock(graph_->GetExitBlock());
        }
      }
    }
  }
}

bool HDeadCodeElimination::RemoveUnneededTries() {
  if (!graph_->HasTryCatch()) {
    return false;
  }

  // Use local allocator for allocating memory.
  ScopedArenaAllocator allocator(graph_->GetArenaStack());

  // Collect which blocks are part of which try.
  ScopedArenaUnorderedMap<HBasicBlock*, TryBelongingInformation> tries(
      allocator.Adapter(kArenaAllocDCE));
  for (HBasicBlock* block : graph_->GetReversePostOrderSkipEntryBlock()) {
    if (block->IsTryBlock()) {
      HBasicBlock* key = block->GetTryCatchInformation()->GetTryEntry().GetBlock();
      auto it = tries.find(key);
      if (it == tries.end()) {
        it = tries.insert({key, TryBelongingInformation(graph_, &allocator)}).first;
      }
      it->second.blocks_in_try.SetBit(block->GetBlockId());
    }
  }

  // Deduplicate the tries which have different try entries but are really the same try.
  for (auto it = tries.begin(); it != tries.end(); it++) {
    HBasicBlock* block = it->first;
    DCHECK(block->EndsWithTryBoundary());
    HTryBoundary* try_boundary = block->GetLastInstruction()->AsTryBoundary();
    for (auto other_it = next(it); other_it != tries.end(); /*other_it++ in the loop*/) {
      HBasicBlock* other_block = other_it->first;
      DCHECK(other_block->EndsWithTryBoundary());
      HTryBoundary* other_try_boundary = other_block->GetLastInstruction()->AsTryBoundary();
      if (try_boundary->HasSameExceptionHandlersAs(*other_try_boundary)) {
        // Merge the entries as they are really the same one.
        // Block merging.
        it->second.blocks_in_try.Union(&other_it->second.blocks_in_try);

        // Add the coalesced try entry to update it too.
        it->second.coalesced_try_entries.SetBit(other_block->GetBlockId());

        // Erase the other entry.
        other_it = tries.erase(other_it);
      } else {
        other_it++;
      }
    }
  }

  size_t removed_tries = 0;
  bool any_block_in_loop = false;

  // Check which tries contain throwing instructions.
  for (const auto& entry : tries) {
    if (CanPerformTryRemoval(entry.second)) {
      ++removed_tries;
      RemoveTry(entry.first, entry.second, &any_block_in_loop);
    }
  }

  if (removed_tries != 0) {
    // We want to:
    //   1) Update the dominance information
    //   2) Remove catch block subtrees, if they are now unreachable.
    // If we run the dominance recomputation without removing the code, those catch blocks will
    // not be part of the post order and won't be removed. If we don't run the dominance
    // recomputation, we risk RemoveDeadBlocks not running it and leaving the graph in an
    // inconsistent state. So, what we can do is run RemoveDeadBlocks and force a recomputation.
    // Note that we are not guaranteed to remove a catch block if we have nested try blocks:
    //
    //   try {
    //     ... nothing can throw. TryBoundary A ...
    //     try {
    //       ... can throw. TryBoundary B ...
    //     } catch (Error e) {}
    //   } catch (Exception e) {}
    //
    // In the example above, we can remove the TryBoundary A but the Exception catch cannot be
    // removed as the TryBoundary B might still throw into that catch. TryBoundary A and B don't
    // get coalesced since they have different catch handlers.

    RemoveDeadBlocks(/* force_recomputation= */ true, any_block_in_loop);
    MaybeRecordStat(stats_, MethodCompilationStat::kRemovedTry, removed_tries);
    return true;
  } else {
    return false;
  }
}

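// Simplifies an If whose two branches are chains of empty (single-Goto) blocks that reconverge on
// a common block with no Phis: the If is replaced with a Goto to the convergence point, and
// condition instructions that become dead as a result are removed.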
bool HDeadCodeElimination::RemoveEmptyIfs() {
  bool did_opt = false;
  for (HBasicBlock* block : graph_->GetPostOrder()) {
    if (!block->EndsWithIf()) {
      continue;
    }

    HIf* if_instr = block->GetLastInstruction()->AsIf();
    HBasicBlock* true_block = if_instr->IfTrueSuccessor();
    HBasicBlock* false_block = if_instr->IfFalseSuccessor();

    // We can use `visited_blocks` to detect cases like
    //        1
    //       / \
    //      2   3
    //       \ /
    //        4   ...
    //        |  /
    //        5
    // where 2, 3, and 4 are single HGoto blocks, and block 5 has Phis.
    ScopedArenaAllocator allocator(graph_->GetArenaStack());
    ArenaBitVector visited_blocks(
        &allocator, graph_->GetBlocks().size(), /*expandable=*/ false, kArenaAllocDCE);
    HBasicBlock* merge_true = true_block;
    visited_blocks.SetBit(merge_true->GetBlockId());
    while (merge_true->IsSingleGoto()) {
      merge_true = merge_true->GetSuccessors()[0];
      visited_blocks.SetBit(merge_true->GetBlockId());
    }

    HBasicBlock* merge_false = false_block;
    while (!visited_blocks.IsBitSet(merge_false->GetBlockId()) && merge_false->IsSingleGoto()) {
      merge_false = merge_false->GetSuccessors()[0];
    }

    if (!visited_blocks.IsBitSet(merge_false->GetBlockId()) || !merge_false->GetPhis().IsEmpty()) {
      // TODO(solanes): We could allow Phis iff both branches have the same value for all Phis.
      // This may not be covered by SsaRedundantPhiElimination in cases like `HPhi[A,A,B]` where
      // the Phi itself is not redundant for the general case but it is for a pair of branches.
      continue;
    }

    // Data structures to help remove now-dead instructions.
    ScopedArenaQueue<HInstruction*> maybe_remove(allocator.Adapter(kArenaAllocDCE));
    ArenaBitVector visited(
        &allocator, graph_->GetCurrentInstructionId(), /*expandable=*/ false, kArenaAllocDCE);
    maybe_remove.push(if_instr->InputAt(0));
    visited.SetBit(if_instr->GetId());

    // Swap HIf with HGoto.
    block->ReplaceAndRemoveInstructionWith(
        if_instr, new (graph_->GetAllocator()) HGoto(if_instr->GetDexPc()));

    // Reconnect blocks.
    block->RemoveSuccessor(true_block);
    block->RemoveSuccessor(false_block);
    true_block->RemovePredecessor(block);
    false_block->RemovePredecessor(block);
    block->AddSuccessor(merge_false);

    // Remove now dead instructions e.g. comparisons that are only used as input to the if
    // instruction. This can allow for further removal of other empty ifs.
    while (!maybe_remove.empty()) {
      HInstruction* instr = maybe_remove.front();
      maybe_remove.pop();
      if (instr->IsDeadAndRemovable()) {
        for (HInstruction* input : instr->GetInputs()) {
          if (visited.IsBitSet(input->GetId())) {
            continue;
          }
          visited.SetBit(input->GetId());
          maybe_remove.push(input);
        }
        instr->GetBlock()->RemoveInstructionOrPhi(instr);
        MaybeRecordStat(stats_, MethodCompilationStat::kRemovedDeadInstruction);
      }
    }

    did_opt = true;
  }

  if (did_opt) {
    graph_->RecomputeDominatorTree();
  }

  return did_opt;
}

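// Removes all blocks that are not reachable from the entry block (taking statically known If and
// PackedSwitch inputs into account) and recomputes dominance, loop and try-block information as
// needed. Returns true if at least one block was removed.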
bool HDeadCodeElimination::RemoveDeadBlocks(bool force_recomputation,
                                            bool force_loop_recomputation) {
  DCHECK_IMPLIES(force_loop_recomputation, force_recomputation);

  // Use local allocator for allocating memory.
  ScopedArenaAllocator allocator(graph_->GetArenaStack());

  // Classify blocks as reachable/unreachable.
  ArenaBitVector live_blocks(&allocator, graph_->GetBlocks().size(), false, kArenaAllocDCE);

  MarkReachableBlocks(graph_, &live_blocks);
  bool removed_one_or_more_blocks = false;
  bool rerun_dominance_and_loop_analysis = false;

  // Remove all dead blocks. Iterate in post order because removal needs the
  // block's chain of dominators and nested loops need to be updated from the
  // inside out.
  for (HBasicBlock* block : graph_->GetPostOrder()) {
    int id = block->GetBlockId();
    if (!live_blocks.IsBitSet(id)) {
      MaybeRecordDeadBlock(block);
      block->DisconnectAndDelete();
      removed_one_or_more_blocks = true;
      if (block->IsInLoop()) {
        rerun_dominance_and_loop_analysis = true;
      }
    }
  }

  // If we removed at least one block, we need to recompute the full
  // dominator tree and try block membership.
  if (removed_one_or_more_blocks || force_recomputation) {
    if (rerun_dominance_and_loop_analysis || force_loop_recomputation) {
      graph_->RecomputeDominatorTree();
    } else {
      graph_->ClearDominanceInformation();
      graph_->ComputeDominanceInformation();
      graph_->ComputeTryBlockInformation();
    }
  }
  return removed_one_or_more_blocks;
}

void HDeadCodeElimination::RemoveDeadInstructions() {
  // Process basic blocks in post-order in the dominator tree, so that
  // a dead instruction depending on another dead instruction is removed.
  for (HBasicBlock* block : graph_->GetPostOrder()) {
    // Traverse this block's instructions in backward order and remove
    // the unused ones.
    HBackwardInstructionIterator i(block->GetInstructions());
    // Skip the first iteration, as the last instruction of a block is
    // a branching instruction.
    DCHECK(i.Current()->IsControlFlow());
    for (i.Advance(); !i.Done(); i.Advance()) {
      HInstruction* inst = i.Current();
      DCHECK(!inst->IsControlFlow());
      if (inst->IsDeadAndRemovable()) {
        block->RemoveInstruction(inst);
        MaybeRecordStat(stats_, MethodCompilationStat::kRemovedDeadInstruction);
      }
    }

    // Same for Phis.
    for (HBackwardInstructionIterator phi_it(block->GetPhis()); !phi_it.Done(); phi_it.Advance()) {
      DCHECK(phi_it.Current()->IsPhi());
      HPhi* phi = phi_it.Current()->AsPhi();
      if (phi->IsPhiDeadAndRemovable()) {
        block->RemovePhi(phi);
        MaybeRecordStat(stats_, MethodCompilationStat::kRemovedDeadPhi);
      }
    }
  }
}

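// Recomputes graph-wide flags (monitor operations, SIMD, bounds checks, always-throwing invokes)
// from the remaining instructions, since dead code elimination may have removed the last
// instruction that required a given flag.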
void HDeadCodeElimination::UpdateGraphFlags() {
  bool has_monitor_operations = false;
  bool has_traditional_simd = false;
  bool has_predicated_simd = false;
  bool has_bounds_checks = false;
  bool has_always_throwing_invokes = false;

  for (HBasicBlock* block : graph_->GetReversePostOrder()) {
    for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
      HInstruction* instruction = it.Current();
      if (instruction->IsMonitorOperation()) {
        has_monitor_operations = true;
      } else if (instruction->IsVecOperation()) {
        HVecOperation* vec_instruction = instruction->AsVecOperation();
        if (vec_instruction->IsPredicated()) {
          has_predicated_simd = true;
        } else {
          has_traditional_simd = true;
        }
      } else if (instruction->IsBoundsCheck()) {
        has_bounds_checks = true;
      } else if (instruction->IsInvoke() && instruction->AsInvoke()->AlwaysThrows()) {
        has_always_throwing_invokes = true;
      }
    }
  }

  graph_->SetHasMonitorOperations(has_monitor_operations);
  graph_->SetHasTraditionalSIMD(has_traditional_simd);
  graph_->SetHasPredicatedSIMD(has_predicated_simd);
  graph_->SetHasBoundsChecks(has_bounds_checks);
  graph_->SetHasAlwaysThrowingInvokes(has_always_throwing_invokes);
}

bool HDeadCodeElimination::Run() {
  // Do not eliminate dead blocks if the graph has irreducible loops. We could
  // support it, but that would require changes in our loop representation to handle
  // multiple entry points. We decided it was not worth the complexity.
  if (!graph_->HasIrreducibleLoops()) {
    // Simplify graph to generate more dead block patterns.
    ConnectSuccessiveBlocks();
    bool did_any_simplification = false;
    did_any_simplification |= SimplifyAlwaysThrows();
    did_any_simplification |= SimplifyIfs();
    did_any_simplification |= RemoveEmptyIfs();
    did_any_simplification |= RemoveDeadBlocks();
    // We call RemoveDeadBlocks before RemoveUnneededTries to remove the dead blocks from the
    // previous optimizations. Otherwise, we might detect that a try has throwing instructions but
    // they are actually dead code. RemoveUnneededTries will call RemoveDeadBlocks again if
    // needed.
    did_any_simplification |= RemoveUnneededTries();
    if (did_any_simplification) {
      // Connect successive blocks created by dead branches.
      ConnectSuccessiveBlocks();
    }
  }
  SsaRedundantPhiElimination(graph_).Run();
  RemoveDeadInstructions();
  UpdateGraphFlags();
  return true;
}

}  // namespace art