/*
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "gtest/gtest.h"

#include <cstdint>

#include "berberis/backend/x86_64/machine_ir.h"
#include "berberis/backend/x86_64/machine_ir_check.h"
#include "berberis/decoder/riscv64/decoder.h"
#include "berberis/guest_state/guest_addr.h"

#include "frontend.h"

namespace berberis {

namespace {

constexpr static GuestAddr kStartGuestAddr = 0x0000'aaaa'bbbb'ccccULL;
// Assume no instructions are compressed.
constexpr int32_t kInsnSize = 4;

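// Returns true iff the edge src_bb -> end_bb is recorded consistently on both
// sides: in src_bb's out-edge list and in end_bb's in-edge list.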
bool DoesEdgeExist(const MachineBasicBlock* src_bb, const MachineBasicBlock* end_bb) {
  bool out_edge_found = false;
  for (auto out_edge : src_bb->out_edges()) {
    if (out_edge->dst() == end_bb) {
      out_edge_found = true;
      break;
    }
  }

  if (!out_edge_found) {
    return false;
  }

  for (auto in_edge : end_bb->in_edges()) {
    if (in_edge->src() == src_bb) {
      // in edge found
      return true;
    }
  }
  return false;
}

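// Returns the first basic block with no in-edges, i.e. the region's entry
// block, or nullptr if every block has a predecessor.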
MachineBasicBlock* FindEntryBasicBlock(const MachineIR* machine_ir) {
  for (auto* bb : machine_ir->bb_list()) {
    if (bb->in_edges().size() == 0U) {
      return bb;
    }
  }
  return nullptr;
}

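// Returns the block that the entry block unconditionally branches to. The
// entry block must end with a PseudoBranch.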
const MachineBasicBlock* FindEntrySuccessor(const MachineIR* machine_ir) {
  auto* entry_bb = FindEntryBasicBlock(machine_ir);
  CHECK_GE(entry_bb->insn_list().size(), 1UL);
  auto* branch_insn = entry_bb->insn_list().back();
  CHECK_EQ(branch_insn->opcode(), kMachineOpPseudoBranch);
  return static_cast<PseudoBranch*>(branch_insn)->then_bb();
}

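// Asserts that bb is non-null and that its last instruction has the expected
// opcode.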
void CheckBasicBlockEndsWith(const MachineBasicBlock* bb, MachineOpcode opcode) {
  ASSERT_NE(bb, nullptr);
  ASSERT_EQ(bb->insn_list().back()->opcode(), opcode);
}

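// Guest address of the instruction kOffset instructions after the region
// start, e.g. insn_at<2>() == kStartGuestAddr + 2 * kInsnSize.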
template <int32_t kOffset>
constexpr GuestAddr insn_at() {
  return kStartGuestAddr + kOffset * kInsnSize;
}

TEST(HeavyOptimizerFrontendTest, BranchTargets) {
  Arena arena;
  x86_64::MachineIR machine_ir(&arena);
  HeavyOptimizerFrontend frontend(&machine_ir, kStartGuestAddr);

  frontend.StartInsn();
  auto tmp = frontend.GetImm(0xbeefULL);
  frontend.IncrementInsnAddr(kInsnSize);

  frontend.StartInsn();
  frontend.SetReg(3, tmp);
  frontend.SetReg(3, tmp);
  frontend.IncrementInsnAddr(kInsnSize);

  frontend.StartInsn();
  frontend.SetReg(3, tmp);
  frontend.IncrementInsnAddr(kInsnSize);

  frontend.Finalize(insn_at<3>());

  ASSERT_EQ(x86_64::CheckMachineIR(machine_ir), x86_64::kMachineIRCheckSuccess);

  auto branch_targets = frontend.branch_targets();

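  // Each instruction address inside the region should have a resolved branch
  // target; the region-end address is recorded but unresolved, and addresses
  // outside the region should be absent from the map.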
  EXPECT_TRUE(branch_targets[insn_at<0>()].second.has_value());
  auto it = branch_targets[insn_at<0>()].second.value();

  EXPECT_TRUE(branch_targets[insn_at<1>()].second.has_value());
  it = branch_targets[insn_at<1>()].second.value();

  EXPECT_TRUE(branch_targets[insn_at<2>()].second.has_value());
  it = branch_targets[insn_at<2>()].second.value();

  EXPECT_FALSE(branch_targets[insn_at<3>()].second.has_value());

  EXPECT_TRUE(branch_targets.find(kStartGuestAddr - kInsnSize) == branch_targets.end());
  EXPECT_TRUE(branch_targets.find(insn_at<4>()) == branch_targets.end());
}

TEST(HeavyOptimizerFrontendTest, LoopInsideRegion) {
  Arena arena;
  x86_64::MachineIR machine_ir(&arena);
  HeavyOptimizerFrontend frontend(&machine_ir, kStartGuestAddr);

  frontend.StartInsn();
  auto tmp = frontend.GetImm(0xbeefULL);
  frontend.IncrementInsnAddr(kInsnSize);

  frontend.StartInsn();
  frontend.SetReg(3, tmp);
  frontend.IncrementInsnAddr(kInsnSize);

  frontend.Finalize(insn_at<1>());

  ASSERT_EQ(x86_64::CheckMachineIR(machine_ir), x86_64::kMachineIRCheckSuccess);

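  // Expected structure: entry -> preloop -> loop, where the loop block either
  // branches back to itself or, via the pending-signals check, conditionally
  // exits to a signal-exit block.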
  auto* preloop_bb = FindEntrySuccessor(&machine_ir);
  auto* branch_insn = preloop_bb->insn_list().back();
  ASSERT_EQ(branch_insn->opcode(), kMachineOpPseudoBranch);
  auto* loop_bb = static_cast<PseudoBranch*>(branch_insn)->then_bb();
  auto* cmpb = *std::next(loop_bb->insn_list().rbegin());
  ASSERT_EQ(cmpb->opcode(), kMachineOpCmpbMemBaseDispImm);
  branch_insn = loop_bb->insn_list().back();
  ASSERT_EQ(branch_insn->opcode(), kMachineOpPseudoCondBranch);
  auto* signal_exit_bb = static_cast<PseudoCondBranch*>(branch_insn)->then_bb();
  branch_insn = signal_exit_bb->insn_list().back();
  ASSERT_EQ(branch_insn->opcode(), kMachineOpPseudoJump);

  EXPECT_EQ(preloop_bb->in_edges().size(), 1UL);
  EXPECT_EQ(preloop_bb->out_edges().size(), 1UL);
  EXPECT_EQ(loop_bb->in_edges().size(), 2UL);
  EXPECT_EQ(loop_bb->out_edges().size(), 2UL);
  EXPECT_EQ(signal_exit_bb->in_edges().size(), 1UL);
  EXPECT_EQ(signal_exit_bb->out_edges().size(), 0UL);

  EXPECT_TRUE(DoesEdgeExist(preloop_bb, loop_bb));
  EXPECT_TRUE(DoesEdgeExist(loop_bb, loop_bb));
  EXPECT_TRUE(DoesEdgeExist(loop_bb, signal_exit_bb));
}

TEST(HeavyOptimizerFrontendTest, BranchBuildsJump) {
  Arena arena;
  x86_64::MachineIR machine_ir(&arena);
  HeavyOptimizerFrontend frontend(&machine_ir, kStartGuestAddr);

  frontend.StartInsn();
  frontend.Branch(kInsnSize);
  frontend.IncrementInsnAddr(kInsnSize);

  // Branch builds Jump.
  CheckBasicBlockEndsWith(FindEntrySuccessor(&machine_ir), kMachineOpPseudoJump);
}

TEST(HeavyOptimizerFrontendTest, ResolveJumps) {
  Arena arena;
  x86_64::MachineIR machine_ir(&arena);
  HeavyOptimizerFrontend frontend(&machine_ir, kStartGuestAddr);

  frontend.StartInsn();
  frontend.Branch(kInsnSize);
  frontend.IncrementInsnAddr(kInsnSize);

  // NOP, just to include this address in the region.
  frontend.StartInsn();
  frontend.IncrementInsnAddr(kInsnSize);

  // ResolveJumps happens here.
  frontend.Finalize(insn_at<2>());

  ASSERT_EQ(x86_64::CheckMachineIR(machine_ir), x86_64::kMachineIRCheckSuccess);

  // Jump is replaced by Branch.
  CheckBasicBlockEndsWith(FindEntrySuccessor(&machine_ir), kMachineOpPseudoBranch);
}

TEST(HeavyOptimizerFrontendTest, ResolveJumpToAlreadyReplacedJump) {
  Arena arena;
  x86_64::MachineIR machine_ir(&arena);
  HeavyOptimizerFrontend frontend(&machine_ir, kStartGuestAddr);

  frontend.StartInsn();
  frontend.Branch(kInsnSize);
  frontend.IncrementInsnAddr(kInsnSize);

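  // The negative offset makes this a back jump to the previous instruction.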
  frontend.StartInsn();
  frontend.Branch(-kInsnSize);
  frontend.IncrementInsnAddr(kInsnSize);

  // ResolveJumps happens here.
  // We are testing that after one of the jumps is resolved, the internal data
  // structures are still valid for the resolution of the second jump.
  frontend.Finalize(insn_at<2>());

  ASSERT_EQ(x86_64::CheckMachineIR(machine_ir), x86_64::kMachineIRCheckSuccess);

  // Both Jumps are replaced by Branches.
  auto* bb = FindEntrySuccessor(&machine_ir);
  CheckBasicBlockEndsWith(bb, kMachineOpPseudoBranch);

  auto* next_bb = bb->out_edges()[0]->dst();
  // This one is a CondBranch because we also insert a pending-signals check.
  CheckBasicBlockEndsWith(next_bb, kMachineOpPseudoCondBranch);
  ASSERT_EQ(next_bb->out_edges()[1]->dst(), bb);
}

TEST(HeavyOptimizerFrontendTest, ResolveJumpToAlreadyReplacedBackJump) {
  Arena arena;
  x86_64::MachineIR machine_ir(&arena);
  HeavyOptimizerFrontend frontend(&machine_ir, kStartGuestAddr);

  frontend.StartInsn();
  frontend.CompareAndBranch(HeavyOptimizerFrontend::Decoder::BranchOpcode::kBeq,
                            MachineReg(1),
                            MachineReg(2),
                            2 * kInsnSize);
  frontend.IncrementInsnAddr(kInsnSize);

  frontend.StartInsn();
  frontend.Branch(-kInsnSize);
  frontend.IncrementInsnAddr(kInsnSize);

  frontend.StartInsn();
  frontend.Branch(-kInsnSize);
  frontend.IncrementInsnAddr(kInsnSize);

  // ResolveJumps happens here.
  // We are testing that after a back jump is resolved, the internal data
  // structures are still valid for the resolution of another jump to it.
  // Note, there is a possible order of resolutions where all back jumps are
  // resolved after the jumps that target them. But we assume that the
  // resolution happens either top-down or bottom-up, in which case this test
  // is useful.
  frontend.Finalize(insn_at<3>());

  ASSERT_EQ(x86_64::CheckMachineIR(machine_ir), x86_64::kMachineIRCheckSuccess);

  // Both back Jumps are replaced by CondBranches because we also insert a
  // pending-signals check.
  //
  // Expected control flow:
  //   BB1 ends with COND_BRANCH; its successors are BB2 and BB3.
  //   BB2 ends with COND_BRANCH back to BB1.
  //   BB3 ends with BRANCH to BB4.
  //   BB4 ends with COND_BRANCH back to BB2.
  auto* bb1 = FindEntrySuccessor(&machine_ir);
  CheckBasicBlockEndsWith(bb1, kMachineOpPseudoCondBranch);

  auto* bb2 = bb1->out_edges()[1]->dst();
  CheckBasicBlockEndsWith(bb2, kMachineOpPseudoCondBranch);
  ASSERT_EQ(bb2->out_edges()[1]->dst(), bb1);

  auto* bb3 = bb1->out_edges()[0]->dst();
  CheckBasicBlockEndsWith(bb3, kMachineOpPseudoBranch);

  auto* bb4 = bb3->out_edges()[0]->dst();
  CheckBasicBlockEndsWith(bb4, kMachineOpPseudoCondBranch);
  ASSERT_EQ(bb4->out_edges()[1]->dst(), bb2);
}

TEST(HeavyOptimizerFrontendTest, ResolveJumpToAnotherJump) {
  Arena arena;
  x86_64::MachineIR machine_ir(&arena);
  HeavyOptimizerFrontend frontend(&machine_ir, kStartGuestAddr);

  // A conditional branch results in two basic blocks.
  // BB0, BB1: kStartGuestAddr.
  frontend.StartInsn();
  frontend.CompareAndBranch(
      HeavyOptimizerFrontend::Decoder::BranchOpcode::kBeq, MachineReg(1), MachineReg(2), 8);
  frontend.IncrementInsnAddr(kInsnSize);

  // Make sure the next Branch doesn't start a basic block, so that we'll
  // need to split it in ResolveJumps.
  // BB2: kStartGuestAddr + 4.
  frontend.StartInsn();
  (void)frontend.GetImm(0xbeefULL);
  frontend.IncrementInsnAddr(kInsnSize);

  // BB2: kStartGuestAddr + 8.
  frontend.StartInsn();
  frontend.Branch(kInsnSize);
  frontend.IncrementInsnAddr(kInsnSize);

  // BB3: kStartGuestAddr + 12.
  frontend.StartInsn();
  frontend.Branch(kInsnSize);
  frontend.IncrementInsnAddr(kInsnSize);

  frontend.Finalize(insn_at<4>());

  // The main check of this test: the IR is still well-formed.
  ASSERT_EQ(x86_64::CheckMachineIR(machine_ir), x86_64::kMachineIRCheckSuccess);

  // Expected control flow:
  //   BB0 -> (BB2 -> BB4) -> BB3
  //     \___BB1____^
  //
  // When resolving the BB1->BB4 jump we split BB2 into BB2 and BB4.
  // Then we must resolve the BB4->BB3 jump, otherwise BB3 will be unlinked
  // from the IR.
  auto* bb0 = FindEntrySuccessor(&machine_ir);
  CheckBasicBlockEndsWith(bb0, kMachineOpPseudoCondBranch);

  auto* bb1 = bb0->out_edges()[1]->dst();
  CheckBasicBlockEndsWith(bb1, kMachineOpPseudoBranch);

  auto* bb5 = bb0->out_edges()[0]->dst();
  CheckBasicBlockEndsWith(bb5, kMachineOpPseudoBranch);

  auto* bb4 = bb5->out_edges()[0]->dst();
  CheckBasicBlockEndsWith(bb4, kMachineOpPseudoBranch);

  EXPECT_EQ(bb1->out_edges()[0]->dst(), bb4);

  auto* bb2 = bb4->out_edges()[0]->dst();
  CheckBasicBlockEndsWith(bb2, kMachineOpPseudoJump);
  EXPECT_EQ(bb2->out_edges().size(), 0u);
}

}  // namespace

}  // namespace berberis