// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/jump-threading.h"
#include "src/compiler/code-generator-impl.h"

namespace v8 {
namespace internal {
namespace compiler {

#define TRACE(...)                                \
  do {                                            \
    if (FLAG_trace_turbo_jt) PrintF(__VA_ARGS__); \
  } while (false)
namespace {

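// Tracks the state of the depth-first search over the instruction blocks.
// For each block, {result} records the RPO number of the block it should be
// forwarded to: {unvisited()} before the block has been seen, {onstack()}
// while it is still on the DFS stack, and the final target once resolved.
// {forwarded} is set whenever some block ends up forwarded to another block.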
struct JumpThreadingState {
  bool forwarded;
  ZoneVector<RpoNumber>& result;
  ZoneStack<RpoNumber>& stack;

  void Clear(size_t count) { result.assign(count, unvisited()); }
  void PushIfUnvisited(RpoNumber num) {
    if (result[num.ToInt()] == unvisited()) {
      stack.push(num);
      result[num.ToInt()] = onstack();
    }
  }
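  // Resolve the block on top of the stack, given that control continues at
  // {to}: a self-jump maps the block to itself, a jump to a block that is
  // still on the stack breaks the cycle, a jump to an already-resolved block
  // forwards transitively, and a jump to an unvisited block pushes it so it
  // is resolved first (the current block stays on the stack).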
  void Forward(RpoNumber to) {
    RpoNumber from = stack.top();
    RpoNumber to_to = result[to.ToInt()];
    bool pop = true;
    if (to == from) {
      TRACE(" xx %d\n", from.ToInt());
      result[from.ToInt()] = from;
    } else if (to_to == unvisited()) {
      TRACE(" fw %d -> %d (recurse)\n", from.ToInt(), to.ToInt());
      stack.push(to);
      result[to.ToInt()] = onstack();
      pop = false;  // recurse.
    } else if (to_to == onstack()) {
      TRACE(" fw %d -> %d (cycle)\n", from.ToInt(), to.ToInt());
      result[from.ToInt()] = to;  // break the cycle.
      forwarded = true;
    } else {
      TRACE(" fw %d -> %d (forward)\n", from.ToInt(), to.ToInt());
      result[from.ToInt()] = to_to;  // forward the block.
      forwarded = true;
    }
    if (pop) stack.pop();
  }
  RpoNumber unvisited() { return RpoNumber::FromInt(-1); }
  RpoNumber onstack() { return RpoNumber::FromInt(-2); }
};

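// Returns true if {block} has exactly one predecessor and that predecessor
// ends in a branch with poisoning (kFlags_branch_and_poison). ComputeForwarding
// does not thread through such blocks.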
bool IsBlockWithBranchPoisoning(InstructionSequence* code,
                                InstructionBlock* block) {
  if (block->PredecessorCount() != 1) return false;
  RpoNumber pred_rpo = (block->predecessors())[0];
  const InstructionBlock* pred = code->InstructionBlockAt(pred_rpo);
  if (pred->code_start() == pred->code_end()) return false;
  Instruction* instr = code->InstructionAt(pred->code_end() - 1);
  FlagsMode mode = FlagsModeField::decode(instr->opcode());
  return mode == kFlags_branch_and_poison;
}

}  // namespace

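// Computes, for every block, the block that execution should be forwarded to
// instead: a block consisting only of redundant moves and nops followed by a
// jump (or a fall-through) is threaded through to its ultimate destination.
// Returns true if any forwarding opportunity was found.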
bool JumpThreading::ComputeForwarding(Zone* local_zone,
                                      ZoneVector<RpoNumber>& result,
                                      InstructionSequence* code,
                                      bool frame_at_start) {
  ZoneStack<RpoNumber> stack(local_zone);
  JumpThreadingState state = {false, result, stack};
  state.Clear(code->InstructionBlockCount());

  // Iterate over the blocks forward, pushing the blocks onto the stack.
  for (auto const block : code->instruction_blocks()) {
    RpoNumber current = block->rpo_number();
    state.PushIfUnvisited(current);

    // Process the stack, which implements DFS through empty blocks.
    while (!state.stack.empty()) {
      InstructionBlock* block = code->InstructionBlockAt(state.stack.top());
      // Process the instructions in the block up to the first instruction
      // that cannot be skipped.
      TRACE("jt [%d] B%d\n", static_cast<int>(stack.size()),
            block->rpo_number().ToInt());
      RpoNumber fw = block->rpo_number();
      if (!IsBlockWithBranchPoisoning(code, block)) {
        bool fallthru = true;
        for (int i = block->code_start(); i < block->code_end(); ++i) {
          Instruction* instr = code->InstructionAt(i);
          if (!instr->AreMovesRedundant()) {
            // can't skip instructions with non-redundant moves.
            TRACE(" parallel move\n");
            fallthru = false;
          } else if (FlagsModeField::decode(instr->opcode()) != kFlags_none) {
            // can't skip instructions with flags continuations.
            TRACE(" flags\n");
            fallthru = false;
          } else if (instr->IsNop()) {
            // skip nops.
            TRACE(" nop\n");
            continue;
          } else if (instr->arch_opcode() == kArchJmp) {
            // try to forward the jump instruction.
            TRACE(" jmp\n");
            // if this block deconstructs the frame, we can't forward it.
            // TODO(mtrofin): we can still forward if we end up building the
            // frame at start. So we should move the decision of whether or
            // not to build a frame into the register allocator, and trickle
            // it down to here and to the code generator.
            if (frame_at_start || !(block->must_deconstruct_frame() ||
                                    block->must_construct_frame())) {
              fw = code->InputRpo(instr, 0);
            }
            fallthru = false;
          } else {
            // can't skip other instructions.
            TRACE(" other\n");
            fallthru = false;
          }
          break;
        }
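        // Only nops were seen (or the block is empty), so execution falls
        // through; forward to the next block in RPO order, if there is one.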
        if (fallthru) {
          int next = 1 + block->rpo_number().ToInt();
          if (next < code->InstructionBlockCount())
            fw = RpoNumber::FromInt(next);
        }
      }
      state.Forward(fw);
    }
  }

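  // Every block must have been resolved to a valid RPO number; none may be
  // left marked as unvisited or onstack.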
#ifdef DEBUG
  for (RpoNumber num : result) {
    DCHECK(num.IsValid());
  }
#endif

  if (FLAG_trace_turbo_jt) {
    for (int i = 0; i < static_cast<int>(result.size()); i++) {
      TRACE("B%d ", i);
      int to = result[i].ToInt();
      if (i != to) {
        TRACE("-> B%d\n", to);
      } else {
        TRACE("\n");
      }
    }
  }

  return state.forwarded;
}

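// Rewrites the instruction sequence according to {result}: redundant jumps in
// skipped blocks are replaced by nops, RPO immediates are redirected to their
// forwarded targets, and assembly order numbers are recomputed.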
void JumpThreading::ApplyForwarding(Zone* local_zone,
                                    ZoneVector<RpoNumber>& result,
                                    InstructionSequence* code) {
  if (!FLAG_turbo_jt) return;

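  // skip[i] is set when block i can be elided entirely: it is forwarded to
  // another block and the previous block (in RPO order) does not fall
  // through into it.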
  ZoneVector<bool> skip(static_cast<int>(result.size()), false, local_zone);

  // Skip empty blocks when the previous block doesn't fall through.
  bool prev_fallthru = true;
  for (auto const block : code->instruction_blocks()) {
    int block_num = block->rpo_number().ToInt();
    skip[block_num] = !prev_fallthru && result[block_num].ToInt() != block_num;

    bool fallthru = true;
    for (int i = block->code_start(); i < block->code_end(); ++i) {
      Instruction* instr = code->InstructionAt(i);
      FlagsMode mode = FlagsModeField::decode(instr->opcode());
      if (mode == kFlags_branch || mode == kFlags_branch_and_poison) {
        fallthru = false;  // branches don't fall through to the next block.
      } else if (instr->arch_opcode() == kArchJmp) {
        if (skip[block_num]) {
          // Overwrite a redundant jump with a nop.
          TRACE("jt-fw nop @%d\n", i);
          instr->OverwriteWithNop();
        }
        fallthru = false;  // jumps don't fall through to the next block.
      }
    }
    prev_fallthru = fallthru;
  }

  // Patch RPO immediates.
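  // Any RpoNumber constant in the immediates table is a block target;
  // redirect it to the block it has been forwarded to.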
  InstructionSequence::Immediates& immediates = code->immediates();
  for (size_t i = 0; i < immediates.size(); i++) {
    Constant constant = immediates[i];
    if (constant.type() == Constant::kRpoNumber) {
      RpoNumber rpo = constant.ToRpoNumber();
      RpoNumber fw = result[rpo.ToInt()];
      if (!(fw == rpo)) immediates[i] = Constant(fw);
    }
  }

  // Recompute assembly order numbers.
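  // Non-deferred blocks are numbered first, deferred blocks after them. A
  // skipped block shares its number with the next block that is emitted.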
  int ao = 0;
  for (auto const block : code->instruction_blocks()) {
    if (!block->IsDeferred()) {
      block->set_ao_number(RpoNumber::FromInt(ao));
      if (!skip[block->rpo_number().ToInt()]) ao++;
    }
  }
  for (auto const block : code->instruction_blocks()) {
    if (block->IsDeferred()) {
      block->set_ao_number(RpoNumber::FromInt(ao));
      if (!skip[block->rpo_number().ToInt()]) ao++;
    }
  }
}

#undef TRACE

}  // namespace compiler
}  // namespace internal
}  // namespace v8