// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/move-optimizer.h"
#include "test/unittests/compiler/instruction-sequence-unittest.h"

namespace v8 {
namespace internal {
namespace compiler {

class MoveOptimizerTest : public InstructionSequenceTest {
 public:
  Instruction* LastInstruction() { return sequence()->instructions().back(); }

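  // Appends a move from |from| to |to| to the parallel move at gap position
  // |pos| of |instr|.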
  void AddMove(Instruction* instr, TestOperand from, TestOperand to,
               Instruction::GapPosition pos = Instruction::START) {
    auto parallel_move = instr->GetOrCreateParallelMove(pos, zone());
    parallel_move->AddMove(ConvertMoveArg(from), ConvertMoveArg(to));
  }

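  // Returns the number of moves in |moves| that are not redundant.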
  int NonRedundantSize(ParallelMove* moves) {
    int i = 0;
    for (auto move : *moves) {
      if (move->IsRedundant()) continue;
      i++;
    }
    return i;
  }

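  // Returns true if |moves| contains a non-redundant move from |from_op| to
  // |to_op|.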
  bool Contains(ParallelMove* moves, TestOperand from_op, TestOperand to_op) {
    auto from = ConvertMoveArg(from_op);
    auto to = ConvertMoveArg(to_op);
    for (auto move : *moves) {
      if (move->IsRedundant()) continue;
      if (move->source().Equals(from) && move->destination().Equals(to)) {
        return true;
      }
    }
    return false;
  }

  // TODO(dcarney): add a verifier.
  void Optimize() {
    WireBlocks();
    if (FLAG_trace_turbo) {
      OFStream os(stdout);
      PrintableInstructionSequence printable = {config(), sequence()};
      os << "----- Instruction sequence before move optimization -----\n"
         << printable;
    }
    MoveOptimizer move_optimizer(zone(), sequence());
    move_optimizer.Run();
    if (FLAG_trace_turbo) {
      OFStream os(stdout);
      PrintableInstructionSequence printable = {config(), sequence()};
      os << "----- Instruction sequence after move optimization -----\n"
         << printable;
    }
  }

 private:
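  // Translates a TestOperand into the InstructionOperand representation that
  // ParallelMove works with.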
  InstructionOperand ConvertMoveArg(TestOperand op) {
    CHECK_EQ(kNoValue, op.vreg_.value_);
    CHECK_NE(kNoValue, op.value_);
    switch (op.type_) {
      case kConstant:
        return ConstantOperand(op.value_);
      case kFixedSlot:
        return AllocatedOperand(LocationOperand::STACK_SLOT,
                                MachineRepresentation::kWord32, op.value_);
      case kFixedRegister:
        CHECK(0 <= op.value_ && op.value_ < num_general_registers());
        return AllocatedOperand(LocationOperand::REGISTER,
                                MachineRepresentation::kWord32, op.value_);
      case kExplicit:
        CHECK(0 <= op.value_ && op.value_ < num_general_registers());
        return ExplicitOperand(LocationOperand::REGISTER,
                               MachineRepresentation::kWord32, op.value_);
      default:
        break;
    }
    CHECK(false);
    return InstructionOperand();
  }
};


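// A gap move that is immediately undone by the next gap should collapse into
// a single move at the last instruction.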
TEST_F(MoveOptimizerTest, RemovesRedundant) {
  StartBlock();
  auto first_instr = EmitNop();
  AddMove(first_instr, Reg(0), Reg(1));
  auto last_instr = EmitNop();
  AddMove(last_instr, Reg(1), Reg(0));
  EndBlock(Last());

  Optimize();

  CHECK_EQ(0, NonRedundantSize(first_instr->parallel_moves()[0]));
  auto move = last_instr->parallel_moves()[0];
  CHECK_EQ(1, NonRedundantSize(move));
  CHECK(Contains(move, Reg(0), Reg(1)));
}


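// Same scenario as above, but the destination of the first move is an
// explicit register operand drawn from the architecture's allocatable set.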
TEST_F(MoveOptimizerTest, RemovesRedundantExplicit) {
  int first_reg_index =
      RegisterConfiguration::ArchDefault(RegisterConfiguration::TURBOFAN)
          ->GetAllocatableGeneralCode(0);
  int second_reg_index =
      RegisterConfiguration::ArchDefault(RegisterConfiguration::TURBOFAN)
          ->GetAllocatableGeneralCode(1);

  StartBlock();
  auto first_instr = EmitNop();
  AddMove(first_instr, Reg(first_reg_index), ExplicitReg(second_reg_index));
  auto last_instr = EmitNop();
  AddMove(last_instr, Reg(second_reg_index), Reg(first_reg_index));
  EndBlock(Last());

  Optimize();

  CHECK_EQ(0, NonRedundantSize(first_instr->parallel_moves()[0]));
  auto move = last_instr->parallel_moves()[0];
  CHECK_EQ(1, NonRedundantSize(move));
  CHECK(Contains(move, Reg(first_reg_index), ExplicitReg(second_reg_index)));
}


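// Several moves of the same constant should be rewritten so the constant is
// materialized into the register once, and the stack slots are then filled
// from that register in the following gap.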
TEST_F(MoveOptimizerTest, SplitsConstants) {
  StartBlock();
  EndBlock(Last());

  auto gap = LastInstruction();
  AddMove(gap, Const(1), Slot(0));
  AddMove(gap, Const(1), Slot(1));
  AddMove(gap, Const(1), Reg(0));
  AddMove(gap, Const(1), Slot(2));

  Optimize();

  auto move = gap->parallel_moves()[0];
  CHECK_EQ(1, NonRedundantSize(move));
  CHECK(Contains(move, Const(1), Reg(0)));

  move = gap->parallel_moves()[1];
  CHECK_EQ(3, NonRedundantSize(move));
  CHECK(Contains(move, Reg(0), Slot(0)));
  CHECK(Contains(move, Reg(0), Slot(1)));
  CHECK(Contains(move, Reg(0), Slot(2)));
}


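// An identical move emitted on both predecessors of a join block should be
// merged into the join block itself.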
TEST_F(MoveOptimizerTest, SimpleMerge) {
  StartBlock();
  EndBlock(Branch(Imm(), 1, 2));

  StartBlock();
  EndBlock(Jump(2));
  AddMove(LastInstruction(), Reg(0), Reg(1));

  StartBlock();
  EndBlock(Jump(1));
  AddMove(LastInstruction(), Reg(0), Reg(1));

  StartBlock();
  EndBlock(Last());

  auto last = LastInstruction();

  Optimize();

  auto move = last->parallel_moves()[0];
  CHECK_EQ(1, NonRedundantSize(move));
  CHECK(Contains(move, Reg(0), Reg(1)));
}


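// A register swap that appears on both branches should likewise be merged
// into the common successor, leaving the branch gaps empty.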
TEST_F(MoveOptimizerTest, SimpleMergeCycle) {
  StartBlock();
  EndBlock(Branch(Imm(), 1, 2));

  StartBlock();
  EndBlock(Jump(2));
  auto gap_0 = LastInstruction();
  AddMove(gap_0, Reg(0), Reg(1));
  AddMove(LastInstruction(), Reg(1), Reg(0));

  StartBlock();
  EndBlock(Jump(1));
  auto gap_1 = LastInstruction();
  AddMove(gap_1, Reg(0), Reg(1));
  AddMove(gap_1, Reg(1), Reg(0));

  StartBlock();
  EndBlock(Last());

  auto last = LastInstruction();

  Optimize();

  CHECK(gap_0->AreMovesRedundant());
  CHECK(gap_1->AreMovesRedundant());
  auto move = last->parallel_moves()[0];
  CHECK_EQ(2, NonRedundantSize(move));
  CHECK(Contains(move, Reg(0), Reg(1)));
  CHECK(Contains(move, Reg(1), Reg(0)));
}


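// Gap moves attached to the constant-defining instruction should be pushed
// past it into the start gap of the next instruction, where the redundant
// move cancels out and only the constant-to-register assignment survives.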
TEST_F(MoveOptimizerTest, GapsCanMoveOverInstruction) {
  StartBlock();
  int const_index = 1;
  DefineConstant(const_index);
  Instruction* ctant_def = LastInstruction();
  AddMove(ctant_def, Reg(1), Reg(0));

  Instruction* last = EmitNop();
  AddMove(last, Const(const_index), Reg(0));
  AddMove(last, Reg(0), Reg(1));
  EndBlock(Last());
  Optimize();

  ParallelMove* inst1_start =
      ctant_def->GetParallelMove(Instruction::GapPosition::START);
  ParallelMove* inst1_end =
      ctant_def->GetParallelMove(Instruction::GapPosition::END);
  ParallelMove* last_start =
      last->GetParallelMove(Instruction::GapPosition::START);
  CHECK(inst1_start == nullptr || inst1_start->size() == 0);
  CHECK(inst1_end == nullptr || inst1_end->size() == 0);
  CHECK(last_start->size() == 2);
  int redundants = 0;
  int assignment = 0;
  for (MoveOperands* move : *last_start) {
    if (move->IsRedundant()) {
      ++redundants;
    } else {
      ++assignment;
      CHECK(move->destination().IsRegister());
      CHECK(move->source().IsConstant());
    }
  }
  CHECK_EQ(1, redundants);
  CHECK_EQ(1, assignment);
}


}  // namespace compiler
}  // namespace internal
}  // namespace v8