1 /*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #ifndef ART_COMPILER_OPTIMIZING_OPTIMIZING_UNIT_TEST_H_
18 #define ART_COMPILER_OPTIMIZING_OPTIMIZING_UNIT_TEST_H_
19
#include <array>
#include <list>
#include <memory>
#include <optional>
#include <ostream>
#include <string>
#include <string_view>
#include <tuple>
#include <variant>
#include <vector>
27
28 #include "base/indenter.h"
29 #include "base/malloc_arena_pool.h"
30 #include "base/scoped_arena_allocator.h"
31 #include "builder.h"
32 #include "common_compiler_test.h"
33 #include "dex/code_item_accessors-inl.h"
34 #include "dex/dex_file.h"
35 #include "dex/dex_instruction.h"
36 #include "dex/standard_dex_file.h"
37 #include "driver/dex_compilation_unit.h"
38 #include "graph_checker.h"
39 #include "gtest/gtest.h"
40 #include "handle_scope-inl.h"
41 #include "handle_scope.h"
42 #include "mirror/class_loader.h"
43 #include "mirror/dex_cache.h"
44 #include "nodes.h"
45 #include "scoped_thread_state_change.h"
46 #include "ssa_builder.h"
47 #include "ssa_liveness_analysis.h"
48
49 namespace art {
50
// Number of 16-bit code units in the given list of Dex instruction words.
#define NUM_INSTRUCTIONS(...) \
  (sizeof((uint16_t[]) {__VA_ARGS__}) /sizeof(uint16_t))

// Builds a raw Dex code item as a brace-enclosed initializer list:
// registers_size followed by zeroed header fields, the instruction count,
// and the instruction words themselves. The zero fields presumably map to
// ins_size/outs_size/tries_size/debug_info_off in the code item header --
// TODO(review): confirm against StandardDexFile::CodeItem's layout.
#define N_REGISTERS_CODE_ITEM(NUM_REGS, ...)   \
    { NUM_REGS, 0, 0, 0, 0, 0, NUM_INSTRUCTIONS(__VA_ARGS__), 0, __VA_ARGS__ }

// Convenience wrappers for the common register counts used in tests.
#define ZERO_REGISTER_CODE_ITEM(...)   N_REGISTERS_CODE_ITEM(0, __VA_ARGS__)
#define ONE_REGISTER_CODE_ITEM(...)    N_REGISTERS_CODE_ITEM(1, __VA_ARGS__)
#define TWO_REGISTERS_CODE_ITEM(...)   N_REGISTERS_CODE_ITEM(2, __VA_ARGS__)
#define THREE_REGISTERS_CODE_ITEM(...) N_REGISTERS_CODE_ITEM(3, __VA_ARGS__)
#define FOUR_REGISTERS_CODE_ITEM(...)  N_REGISTERS_CODE_ITEM(4, __VA_ARGS__)
#define FIVE_REGISTERS_CODE_ITEM(...)  N_REGISTERS_CODE_ITEM(5, __VA_ARGS__)
#define SIX_REGISTERS_CODE_ITEM(...)   N_REGISTERS_CODE_ITEM(6, __VA_ARGS__)
64
// Thin wrapper around an HInstruction* whose only purpose is to produce a
// readable dump of the instruction (see operator<< below) when a gtest
// EXPECT_*/ASSERT_* comparison on it fails.
struct InstructionDumper {
 public:
  HInstruction* ins_;
};
69
70 inline bool operator==(const InstructionDumper& a, const InstructionDumper& b) {
71 return a.ins_ == b.ins_;
72 }
73 inline bool operator!=(const InstructionDumper& a, const InstructionDumper& b) {
74 return !(a == b);
75 }
76
// Streams a human-readable description of the wrapped instruction: "NULL"
// for a null pointer, otherwise the pointer value followed by the
// instruction's DumpWithArgs() output.
inline std::ostream& operator<<(std::ostream& os, const InstructionDumper& id) {
  if (id.ins_ == nullptr) {
    return os << "NULL";
  } else {
    return os << "(" << id.ins_ << "): " << id.ins_->DumpWithArgs();
  }
}
84
// gtest helpers that compare/check HInstruction pointers but print a full
// instruction dump (via InstructionDumper) on failure instead of a raw pointer.
// The *_REMOVED/*_RETAINED variants rely on IsRemoved() defined further below.
#define EXPECT_INS_EQ(a, b) EXPECT_EQ(InstructionDumper{a}, InstructionDumper{b})
#define EXPECT_INS_REMOVED(a) EXPECT_TRUE(IsRemoved(a)) << "Not removed: " << (InstructionDumper{a})
#define EXPECT_INS_RETAINED(a) EXPECT_FALSE(IsRemoved(a)) << "Removed: " << (InstructionDumper{a})
#define ASSERT_INS_EQ(a, b) ASSERT_EQ(InstructionDumper{a}, InstructionDumper{b})
#define ASSERT_INS_REMOVED(a) ASSERT_TRUE(IsRemoved(a)) << "Not removed: " << (InstructionDumper{a})
#define ASSERT_INS_RETAINED(a) ASSERT_FALSE(IsRemoved(a)) << "Removed: " << (InstructionDumper{a})
91
// Builds a kInt32 LiveInterval from `number_of_ranges` [start, end) pairs,
// optionally assigning it a register and attaching it to `defined_by`.
inline LiveInterval* BuildInterval(const size_t ranges[][2],
                                   size_t number_of_ranges,
                                   ScopedArenaAllocator* allocator,
                                   int reg = -1,
                                   HInstruction* defined_by = nullptr) {
  LiveInterval* interval =
      LiveInterval::MakeInterval(allocator, DataType::Type::kInt32, defined_by);
  if (defined_by != nullptr) {
    defined_by->SetLiveInterval(interval);
  }
  // Add the ranges back-to-front. Presumably AddRange prepends to the
  // interval's range list, so iterating in reverse leaves the ranges in
  // their original order -- TODO(review): confirm against
  // LiveInterval::AddRange in ssa_liveness_analysis.h.
  for (size_t i = number_of_ranges; i > 0; --i) {
    interval->AddRange(ranges[i - 1][0], ranges[i - 1][1]);
  }
  interval->SetRegister(reg);
  return interval;
}
108
// Strips every HSuspendCheck from the graph and clears the suspend-check
// back-reference on each loop, so tests can compare graphs without the
// checks getting in the way.
inline void RemoveSuspendChecks(HGraph* graph) {
  for (HBasicBlock* block : graph->GetBlocks()) {
    // Blocks may have been removed from the graph, leaving null slots.
    if (block != nullptr) {
      if (block->GetLoopInformation() != nullptr) {
        block->GetLoopInformation()->SetSuspendCheck(nullptr);
      }
      for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
        HInstruction* current = it.Current();
        if (current->IsSuspendCheck()) {
          // Removing the current instruction mid-iteration; this assumes
          // HInstructionIterator has already cached its next node --
          // TODO(review): confirm the iterator's removal-safety contract.
          current->GetBlock()->RemoveInstruction(current);
        }
      }
    }
  }
}
124
// Bundles a malloc-backed arena pool with the three allocator flavors the
// optimizing compiler tests need, so a test can tear all of them down (and
// reset them) as a single unit.
class ArenaPoolAndAllocator {
 public:
  ArenaPoolAndAllocator()
      : pool_(), allocator_(&pool_), arena_stack_(&pool_), scoped_allocator_(&arena_stack_) { }

  ArenaAllocator* GetAllocator() { return &allocator_; }
  ArenaStack* GetArenaStack() { return &arena_stack_; }
  ScopedArenaAllocator* GetScopedAllocator() { return &scoped_allocator_; }

 private:
  // Declaration order is load-bearing: members are initialized top-down, and
  // allocator_/arena_stack_ take &pool_ while scoped_allocator_ takes
  // &arena_stack_. Do not reorder.
  MallocArenaPool pool_;
  ArenaAllocator allocator_;
  ArenaStack arena_stack_;
  ScopedArenaAllocator scoped_allocator_;
};
140
// Builds an HGraph's CFG from a textual adjacency list of (source, dest)
// block-name pairs and keeps bidirectional name<->block maps so tests can
// refer to blocks by name. The string_views used as names must outlive this
// object (they are stored, not copied) -- callers typically pass literals.
class AdjacencyListGraph {
 public:
  using Edge = std::pair<const std::string_view, const std::string_view>;
  // Creates entry/exit blocks under the given names, then one block per
  // distinct name appearing in `adj`, wiring successors accordingly.
  // Dominance and reachability information is (re)computed at the end.
  AdjacencyListGraph(
      HGraph* graph,
      ArenaAllocator* alloc,
      const std::string_view entry_name,
      const std::string_view exit_name,
      const std::vector<Edge>& adj) : graph_(graph) {
    auto create_block = [&]() {
      HBasicBlock* blk = new (alloc) HBasicBlock(graph_);
      graph_->AddBlock(blk);
      return blk;
    };
    HBasicBlock* entry = create_block();
    HBasicBlock* exit = create_block();
    graph_->SetEntryBlock(entry);
    graph_->SetExitBlock(exit);
    name_to_block_.Put(entry_name, entry);
    name_to_block_.Put(exit_name, exit);
    for (const auto& [src, dest] : adj) {
      // GetOrCreate lazily materializes blocks the first time a name is seen.
      HBasicBlock* src_blk = name_to_block_.GetOrCreate(src, create_block);
      HBasicBlock* dest_blk = name_to_block_.GetOrCreate(dest, create_block);
      src_blk->AddSuccessor(dest_blk);
    }
    graph_->ClearReachabilityInformation();
    graph_->ComputeDominanceInformation();
    graph_->ComputeReachabilityInformation();
    // Build the reverse (block -> name) map once all blocks exist.
    for (auto [name, blk] : name_to_block_) {
      block_to_name_.Put(blk, name);
    }
  }

  // Returns whether `blk` was created/named by this adjacency list.
  bool HasBlock(const HBasicBlock* blk) const {
    return block_to_name_.find(blk) != block_to_name_.end();
  }

  // Name lookup; precondition: HasBlock(blk).
  std::string_view GetName(const HBasicBlock* blk) const {
    return block_to_name_.Get(blk);
  }

  // Block lookup by name; precondition: the name was used in the ctor.
  HBasicBlock* Get(const std::string_view& sv) const {
    return name_to_block_.Get(sv);
  }

  AdjacencyListGraph(AdjacencyListGraph&&) = default;
  AdjacencyListGraph(const AdjacencyListGraph&) = default;
  AdjacencyListGraph& operator=(AdjacencyListGraph&&) = default;
  AdjacencyListGraph& operator=(const AdjacencyListGraph&) = default;

  // Dumps the graph, labelling known blocks as "name (id)" and others as
  // "<Unnamed Bid>".
  std::ostream& Dump(std::ostream& os) const {
    struct Namer : public BlockNamer {
     public:
      explicit Namer(const AdjacencyListGraph& alg) : BlockNamer(), alg_(alg) {}
      std::ostream& PrintName(std::ostream& os, HBasicBlock* blk) const override {
        if (alg_.HasBlock(blk)) {
          return os << alg_.GetName(blk) << " (" << blk->GetBlockId() << ")";
        } else {
          return os << "<Unnamed B" << blk->GetBlockId() << ">";
        }
      }

      const AdjacencyListGraph& alg_;
    };
    Namer namer(*this);
    return graph_->Dump(os, namer);
  }

 private:
  HGraph* graph_;
  SafeMap<const std::string_view, HBasicBlock*> name_to_block_;
  SafeMap<const HBasicBlock*, const std::string_view> block_to_name_;
};
214
// Keep this helper separate from the gtest-derived fixture so that
// OptimizingCFITest can inherit it without gtest appearing twice among its
// base classes (which would cause multiple-inheritance errors).
class OptimizingUnitTestHelper {
 public:
  OptimizingUnitTestHelper()
      : pool_and_allocator_(new ArenaPoolAndAllocator()),
        graph_(nullptr),
        entry_block_(nullptr),
        return_block_(nullptr),
        exit_block_(nullptr) { }

  ArenaAllocator* GetAllocator() { return pool_and_allocator_->GetAllocator(); }
  ArenaStack* GetArenaStack() { return pool_and_allocator_->GetArenaStack(); }
  ScopedArenaAllocator* GetScopedAllocator() { return pool_and_allocator_->GetScopedAllocator(); }

  // Drops every arena-allocated object (including any previously created
  // graph) by replacing the whole pool/allocator bundle.
  void ResetPoolAndAllocator() {
    pool_and_allocator_.reset(new ArenaPoolAndAllocator());
  }

  // Creates a fresh HGraph backed by a fake dex file and stores it in graph_.
  HGraph* CreateGraph(VariableSizedHandleScope* handles = nullptr) {
    ArenaAllocator* const allocator = pool_and_allocator_->GetAllocator();

    // Reserve a big array of 0s so the dex file constructor can read offsets
    // from the (all-zero) header.
    static constexpr size_t kDexDataSize = 4 * KB;
    const uint8_t* dex_data = reinterpret_cast<uint8_t*>(allocator->Alloc(kDexDataSize));

    // Create the dex file based on the fake data. Call the constructor so that we can use virtual
    // functions. Don't use the arena for the StandardDexFile otherwise the dex location leaks.
    dex_files_.emplace_back(new StandardDexFile(
        dex_data,
        sizeof(StandardDexFile::Header),
        "no_location",
        /*location_checksum*/ 0,
        /*oat_dex_file*/ nullptr,
        /*container*/ nullptr));

    graph_ = new (allocator) HGraph(
        allocator,
        pool_and_allocator_->GetArenaStack(),
        handles,
        *dex_files_.back(),
        /*method_idx*/-1,
        kRuntimeISA);
    return graph_;
  }

  // Create a control-flow graph from Dex instructions.
  // Returns nullptr when the builder rejects the bytecode.
  HGraph* CreateCFG(const std::vector<uint16_t>& data,
                    DataType::Type return_type = DataType::Type::kInt32,
                    VariableSizedHandleScope* handles = nullptr) {
    HGraph* graph = CreateGraph(handles);

    // The code item data might not be aligned to 4 bytes; copy it into
    // arena-allocated storage to ensure proper alignment.
    const size_t code_item_size = data.size() * sizeof(data.front());
    void* aligned_data = GetAllocator()->Alloc(code_item_size);
    memcpy(aligned_data, &data[0], code_item_size);
    CHECK_ALIGNED(aligned_data, StandardDexFile::CodeItem::kAlignment);
    const dex::CodeItem* code_item = reinterpret_cast<const dex::CodeItem*>(aligned_data);

    {
      // Minimal compilation unit: no class loader, linker, or dex cache --
      // just enough context for HGraphBuilder to run.
      const DexCompilationUnit* dex_compilation_unit =
          new (graph->GetAllocator()) DexCompilationUnit(
              /* class_loader= */ Handle<mirror::ClassLoader>(),  // Invalid handle.
              /* class_linker= */ nullptr,
              graph->GetDexFile(),
              code_item,
              /* class_def_index= */ DexFile::kDexNoIndex16,
              /* method_idx= */ dex::kDexNoIndex,
              /* access_flags= */ 0u,
              /* verified_method= */ nullptr,
              /* dex_cache= */ Handle<mirror::DexCache>());  // Invalid handle.
      CodeItemDebugInfoAccessor accessor(graph->GetDexFile(), code_item, /*dex_method_idx*/ 0u);
      HGraphBuilder builder(graph, dex_compilation_unit, accessor, return_type);
      bool graph_built = (builder.BuildGraph() == kAnalysisSuccess);
      return graph_built ? graph : nullptr;
    }
  }

  // Builds the minimal three-block skeleton entry -> return -> exit and
  // caches the blocks in the corresponding members.
  void InitGraph(VariableSizedHandleScope* handles = nullptr) {
    CreateGraph(handles);
    entry_block_ = AddNewBlock();
    return_block_ = AddNewBlock();
    exit_block_ = AddNewBlock();

    graph_->SetEntryBlock(entry_block_);
    graph_->SetExitBlock(exit_block_);

    entry_block_->AddSuccessor(return_block_);
    return_block_->AddSuccessor(exit_block_);

    return_block_->AddInstruction(new (GetAllocator()) HReturnVoid());
    exit_block_->AddInstruction(new (GetAllocator()) HExit());
  }

  // Appends `parameter` to the entry block and records it in parameters_.
  void AddParameter(HInstruction* parameter) {
    entry_block_->AddInstruction(parameter);
    parameters_.push_back(parameter);
  }

  // Allocates a new basic block and registers it with the graph.
  HBasicBlock* AddNewBlock() {
    HBasicBlock* block = new (GetAllocator()) HBasicBlock(graph_);
    graph_->AddBlock(block);
    return block;
  }

  // Run GraphChecker with all checks.
  //
  // Return: the status whether the run is successful.
  bool CheckGraph(HGraph* graph, std::ostream& oss = std::cerr) {
    return CheckGraph(graph, /*check_ref_type_info=*/true, oss);
  }

  bool CheckGraph(std::ostream& oss = std::cerr) {
    return CheckGraph(graph_, oss);
  }

  // Run GraphChecker with all checks except reference type information checks.
  //
  // Return: the status whether the run is successful.
  bool CheckGraphSkipRefTypeInfoChecks(HGraph* graph, std::ostream& oss = std::cerr) {
    return CheckGraph(graph, /*check_ref_type_info=*/false, oss);
  }

  bool CheckGraphSkipRefTypeInfoChecks(std::ostream& oss = std::cerr) {
    return CheckGraphSkipRefTypeInfoChecks(graph_, oss);
  }

  // Attaches a fresh HEnvironment holding `current_locals` to `instruction`
  // and returns it.
  HEnvironment* ManuallyBuildEnvFor(HInstruction* instruction,
                                    ArenaVector<HInstruction*>* current_locals) {
    HEnvironment* environment = new (GetAllocator()) HEnvironment(
        (GetAllocator()),
        current_locals->size(),
        graph_->GetArtMethod(),
        instruction->GetDexPc(),
        instruction);

    environment->CopyFrom(ArrayRef<HInstruction* const>(*current_locals));
    instruction->SetRawEnvironment(environment);
    return environment;
  }

  // Asserts that `target`'s predecessors are exactly `preds` (as a multiset),
  // then rewrites the predecessor list to match the order given in `preds`.
  void EnsurePredecessorOrder(HBasicBlock* target, std::initializer_list<HBasicBlock*> preds) {
    // Make sure the given preds and block predecessors have the same blocks.
    BitVector bv(preds.size(), false, Allocator::GetMallocAllocator());
    auto preds_and_idx = ZipCount(MakeIterationRange(target->GetPredecessors()));
    bool correct_preds = preds.size() == target->GetPredecessors().size() &&
                         std::all_of(preds.begin(), preds.end(), [&](HBasicBlock* pred) {
                           return std::any_of(preds_and_idx.begin(),
                                              preds_and_idx.end(),
                                              // Make sure every target predecessor is used only
                                              // once.
                                              [&](std::pair<HBasicBlock*, uint32_t> cur) {
                                                if (cur.first == pred && !bv.IsBitSet(cur.second)) {
                                                  bv.SetBit(cur.second);
                                                  return true;
                                                } else {
                                                  return false;
                                                }
                                              });
                         }) &&
                         bv.NumSetBits() == preds.size();
    auto dump_list = [](auto it) {
      std::ostringstream oss;
      oss << "[";
      bool first = true;
      for (HBasicBlock* b : it) {
        if (!first) {
          oss << ", ";
        }
        first = false;
        oss << b->GetBlockId();
      }
      oss << "]";
      return oss.str();
    };
    ASSERT_TRUE(correct_preds) << "Predecessors of " << target->GetBlockId() << " are "
                               << dump_list(target->GetPredecessors()) << " not "
                               << dump_list(preds);
    if (correct_preds) {
      // Overwrite the predecessor list in-place with the requested order.
      std::copy(preds.begin(), preds.end(), target->predecessors_.begin());
    }
  }

  AdjacencyListGraph SetupFromAdjacencyList(const std::string_view entry_name,
                                            const std::string_view exit_name,
                                            const std::vector<AdjacencyListGraph::Edge>& adj) {
    return AdjacencyListGraph(graph_, GetAllocator(), entry_name, exit_name, adj);
  }

  // Convenience overload taking the environment values as an initializer list.
  void ManuallyBuildEnvFor(HInstruction* ins, const std::initializer_list<HInstruction*>& env) {
    ArenaVector<HInstruction*> current_locals(env, GetAllocator()->Adapter(kArenaAllocInstruction));
    OptimizingUnitTestHelper::ManuallyBuildEnvFor(ins, &current_locals);
  }

  // Creates an HLoadClass; without an explicit type index, consecutive calls
  // get distinct indices (class_idx_ is post-incremented).
  HLoadClass* MakeClassLoad(std::optional<dex::TypeIndex> ti = std::nullopt,
                            std::optional<Handle<mirror::Class>> klass = std::nullopt) {
    return new (GetAllocator()) HLoadClass(graph_->GetCurrentMethod(),
                                           ti ? *ti : dex::TypeIndex(class_idx_++),
                                           graph_->GetDexFile(),
                                           /* klass= */ klass ? *klass : null_klass_,
                                           /* is_referrers_class= */ false,
                                           /* dex_pc= */ 0,
                                           /* needs_access_check= */ false);
  }

  // Creates an HNewInstance from an HLoadClass or HClinitCheck input.
  HNewInstance* MakeNewInstance(HInstruction* cls, uint32_t dex_pc = 0u) {
    EXPECT_TRUE(cls->IsLoadClass() || cls->IsClinitCheck()) << *cls;
    HLoadClass* load =
        cls->IsLoadClass() ? cls->AsLoadClass() : cls->AsClinitCheck()->GetLoadClass();
    return new (GetAllocator()) HNewInstance(cls,
                                             dex_pc,
                                             load->GetTypeIndex(),
                                             graph_->GetDexFile(),
                                             /* finalizable= */ false,
                                             QuickEntrypointEnum::kQuickAllocObjectInitialized);
  }

  // Creates a non-volatile instance-field store of `data` into `inst` at
  // offset `off`; the field type is taken from `data`.
  HInstanceFieldSet* MakeIFieldSet(HInstruction* inst,
                                   HInstruction* data,
                                   MemberOffset off,
                                   uint32_t dex_pc = 0u) {
    return new (GetAllocator()) HInstanceFieldSet(inst,
                                                  data,
                                                  /* field= */ nullptr,
                                                  /* field_type= */ data->GetType(),
                                                  /* field_offset= */ off,
                                                  /* is_volatile= */ false,
                                                  /* field_idx= */ 0,
                                                  /* declaring_class_def_index= */ 0,
                                                  graph_->GetDexFile(),
                                                  dex_pc);
  }

  // Creates a non-volatile instance-field load of `type` from `inst` at
  // offset `off`.
  HInstanceFieldGet* MakeIFieldGet(HInstruction* inst,
                                   DataType::Type type,
                                   MemberOffset off,
                                   uint32_t dex_pc = 0u) {
    return new (GetAllocator()) HInstanceFieldGet(inst,
                                                  /* field= */ nullptr,
                                                  /* field_type= */ type,
                                                  /* field_offset= */ off,
                                                  /* is_volatile= */ false,
                                                  /* field_idx= */ 0,
                                                  /* declaring_class_def_index= */ 0,
                                                  graph_->GetDexFile(),
                                                  dex_pc);
  }

  // Creates a static invoke with the given arguments; each call uses a fresh
  // method index (method_idx_ is post-incremented).
  HInvokeStaticOrDirect* MakeInvoke(DataType::Type return_type,
                                    const std::vector<HInstruction*>& args) {
    MethodReference method_reference{/* file= */ &graph_->GetDexFile(), /* index= */ method_idx_++};
    HInvokeStaticOrDirect* res = new (GetAllocator())
        HInvokeStaticOrDirect(GetAllocator(),
                              args.size(),
                              return_type,
                              /* dex_pc= */ 0,
                              method_reference,
                              /* resolved_method= */ nullptr,
                              HInvokeStaticOrDirect::DispatchInfo{},
                              InvokeType::kStatic,
                              /* resolved_method_reference= */ method_reference,
                              HInvokeStaticOrDirect::ClinitCheckRequirement::kNone);
    for (auto [ins, idx] : ZipCount(MakeIterationRange(args))) {
      res->SetRawInputAt(idx, ins);
    }
    return res;
  }

  // Creates a phi over `ins`; the phi's type is taken from the first input.
  HPhi* MakePhi(const std::vector<HInstruction*>& ins) {
    EXPECT_GE(ins.size(), 2u) << "Phi requires at least 2 inputs";
    HPhi* phi =
        new (GetAllocator()) HPhi(GetAllocator(), kNoRegNumber, ins.size(), ins[0]->GetType());
    for (auto [i, idx] : ZipCount(MakeIterationRange(ins))) {
      phi->SetRawInputAt(idx, i);
    }
    return phi;
  }

  // Gives `exit` its mandatory HExit instruction.
  void SetupExit(HBasicBlock* exit) {
    exit->AddInstruction(new (GetAllocator()) HExit());
  }

  // Maps each data type to an arbitrary-but-stable fake dex type index used
  // when the test doesn't care about the actual index. Void is invalid.
  dex::TypeIndex DefaultTypeIndexForType(DataType::Type type) {
    switch (type) {
      case DataType::Type::kBool:
        return dex::TypeIndex(1);
      case DataType::Type::kUint8:
      case DataType::Type::kInt8:
        return dex::TypeIndex(2);
      case DataType::Type::kUint16:
      case DataType::Type::kInt16:
        return dex::TypeIndex(3);
      case DataType::Type::kUint32:
      case DataType::Type::kInt32:
        return dex::TypeIndex(4);
      case DataType::Type::kUint64:
      case DataType::Type::kInt64:
        return dex::TypeIndex(5);
      case DataType::Type::kReference:
        return dex::TypeIndex(6);
      case DataType::Type::kFloat32:
        return dex::TypeIndex(7);
      case DataType::Type::kFloat64:
        return dex::TypeIndex(8);
      case DataType::Type::kVoid:
        EXPECT_TRUE(false) << "No type for void!";
        return dex::TypeIndex(1000);
    }
  }

  // Creates a parameter. The instruction is automatically added to the entry-block
  HParameterValue* MakeParam(DataType::Type type, std::optional<dex::TypeIndex> ti = std::nullopt) {
    HParameterValue* val = new (GetAllocator()) HParameterValue(
        graph_->GetDexFile(), ti ? *ti : DefaultTypeIndexForType(type), param_count_++, type);
    graph_->GetEntryBlock()->AddInstruction(val);
    return val;
  }

 protected:
  // Shared implementation for the public CheckGraph* wrappers: runs the
  // GraphChecker, dumps any findings to `oss`, and reports validity.
  bool CheckGraph(HGraph* graph, bool check_ref_type_info, std::ostream& oss) {
    GraphChecker checker(graph);
    checker.SetRefTypeInfoCheckEnabled(check_ref_type_info);
    checker.Run();
    checker.Dump(oss);
    return checker.IsValid();
  }

  std::vector<std::unique_ptr<const StandardDexFile>> dex_files_;
  std::unique_ptr<ArenaPoolAndAllocator> pool_and_allocator_;

  HGraph* graph_;
  HBasicBlock* entry_block_;
  HBasicBlock* return_block_;
  HBasicBlock* exit_block_;

  std::vector<HInstruction*> parameters_;

  // Counters for handing out unique parameter/class/method indices.
  size_t param_count_ = 0;
  size_t class_idx_ = 42;
  uint32_t method_idx_ = 100;

  ScopedNullHandle<mirror::Class> null_klass_;
};
558
559 class OptimizingUnitTest : public CommonArtTest, public OptimizingUnitTestHelper {};
560
// Naive string diff data type: an ordered list of (needle, replacement) pairs.
typedef std::list<std::pair<std::string, std::string>> diff_t;

// An alias for the empty string used to make it clear that a line is
// removed in a diff.
static const std::string removed = "";  // NOLINT [runtime/string] [4]
567
568 // Naive patch command: apply a diff to a string.
Patch(const std::string & original,const diff_t & diff)569 inline std::string Patch(const std::string& original, const diff_t& diff) {
570 std::string result = original;
571 for (const auto& p : diff) {
572 std::string::size_type pos = result.find(p.first);
573 DCHECK_NE(pos, std::string::npos)
574 << "Could not find: \"" << p.first << "\" in \"" << result << "\"";
575 result.replace(pos, p.first.size(), p.second);
576 }
577 return result;
578 }
579
// Returns whether the instruction has been removed from the graph: an
// instruction detached from any basic block has a null block pointer.
inline bool IsRemoved(HInstruction* instruction) {
  return instruction->GetBlock() == nullptr;
}
584
// Streams an AdjacencyListGraph by delegating to its Dump method.
inline std::ostream& operator<<(std::ostream& oss, const AdjacencyListGraph& alg) {
  return alg.Dump(oss);
}
588
// Graph visitor that dispatches to user-supplied callables, one per concrete
// instruction kind. Each handler's parameter type (e.g. H##Kind*) determines,
// at compile time, which instruction kind it handles; instructions with no
// registered handler are skipped.
class PatternMatchGraphVisitor : public HGraphVisitor {
 private:
  // Type-erased base so handlers of different lambda types can share one
  // array slot per instruction kind.
  struct HandlerWrapper {
   public:
    virtual ~HandlerWrapper() {}
    virtual void operator()(HInstruction* h) = 0;
  };

  template <HInstruction::InstructionKind kKind, typename F>
  struct KindWrapper;

// Generates one KindWrapper specialization per concrete instruction kind,
// forwarding the instruction (downcast via As##nm) to the stored callable.
// (No comments inside the macro body: backslash-continuations would make a
// '//' swallow the rest of the definition.)
#define GEN_HANDLER(nm, unused)                                                         \
  template <typename F>                                                                 \
  struct KindWrapper<HInstruction::InstructionKind::k##nm, F> : public HandlerWrapper { \
   public:                                                                              \
    explicit KindWrapper(F f) : f_(f) {}                                                \
    void operator()(HInstruction* h) override {                                         \
      if constexpr (std::is_invocable_v<F, H##nm*>) {                                   \
        f_(h->As##nm());                                                                \
      } else {                                                                          \
        LOG(FATAL) << "Incorrect call with " << #nm;                                    \
      }                                                                                 \
    }                                                                                   \
                                                                                        \
   private:                                                                             \
    F f_;                                                                               \
  };

  FOR_EACH_CONCRETE_INSTRUCTION(GEN_HANDLER)
#undef GEN_HANDLER

  // Wraps `f` in the KindWrapper specialization matching the runtime `kind`.
  template <typename F>
  std::unique_ptr<HandlerWrapper> GetWrapper(HInstruction::InstructionKind kind, F f) {
    switch (kind) {
#define GEN_GETTER(nm, unused)                   \
  case HInstruction::InstructionKind::k##nm:     \
    return std::unique_ptr<HandlerWrapper>(      \
        new KindWrapper<HInstruction::InstructionKind::k##nm, F>(f));
      FOR_EACH_CONCRETE_INSTRUCTION(GEN_GETTER)
#undef GEN_GETTER
      default:
        LOG(FATAL) << "Unable to handle kind " << kind;
        return nullptr;
    }
  }

 public:
  template <typename... Inst>
  explicit PatternMatchGraphVisitor(HGraph* graph, Inst... handlers) : HGraphVisitor(graph) {
    FillHandlers(handlers...);
  }

  // Looks up the handler registered for this instruction's kind, if any.
  void VisitInstruction(HInstruction* instruction) override {
    auto& h = handlers_[instruction->GetKind()];
    if (h.get() != nullptr) {
      (*h)(instruction);
    }
  }

 private:
  // Compile-time mapping from a callable's parameter type to the instruction
  // kind it handles. The static_asserts reject callables taking the generic
  // HInstruction* or an abstract instruction type; given those constraints,
  // one of the if-constexpr branches returns and the trailing
  // kLastInstructionKind return is unreachable (FillHandlers re-checks it at
  // runtime anyway).
  template <typename Func>
  constexpr HInstruction::InstructionKind GetKind() {
#define CHECK_INST(nm, unused)                    \
    if constexpr (std::is_invocable_v<Func, H##nm*>) { \
      return HInstruction::InstructionKind::k##nm;     \
    }
    FOR_EACH_CONCRETE_INSTRUCTION(CHECK_INST);
#undef CHECK_INST
    static_assert(!std::is_invocable_v<Func, HInstruction*>,
                  "Use on generic HInstruction not allowed");
#define STATIC_ASSERT_ABSTRACT(nm, unused) && !std::is_invocable_v<Func, H##nm*>
    static_assert(true FOR_EACH_ABSTRACT_INSTRUCTION(STATIC_ASSERT_ABSTRACT),
                  "Must not be abstract instruction");
#undef STATIC_ASSERT_ABSTRACT
#define STATIC_ASSERT_CONCRETE(nm, unused) || std::is_invocable_v<Func, H##nm*>
    static_assert(false FOR_EACH_CONCRETE_INSTRUCTION(STATIC_ASSERT_CONCRETE),
                  "Must be a concrete instruction");
#undef STATIC_ASSERT_CONCRETE
    return HInstruction::InstructionKind::kLastInstructionKind;
  }

  // Registers a single handler under its deduced instruction kind.
  template <typename First>
  void FillHandlers(First h1) {
    HInstruction::InstructionKind type = GetKind<First>();
    CHECK_NE(type, HInstruction::kLastInstructionKind)
        << "Unknown instruction kind. Only concrete ones please.";
    handlers_[type] = GetWrapper(type, h1);
  }

  // Recursively registers all handlers.
  template <typename First, typename... Inst>
  void FillHandlers(First h1, Inst... handlers) {
    FillHandlers(h1);
    FillHandlers<Inst...>(handlers...);
  }

  // One optional handler slot per instruction kind, indexed by the kind enum.
  std::array<std::unique_ptr<HandlerWrapper>, HInstruction::InstructionKind::kLastInstructionKind>
      handlers_;
};
686
// Collects every instruction of each requested type, returning one vector per
// type. `blks` restricts the search: a single block, a list of blocks, or
// std::nullopt (the default) for the whole graph in insertion order.
template <typename... Target>
std::tuple<std::vector<Target*>...> FindAllInstructions(
    HGraph* graph,
    std::variant<std::nullopt_t, HBasicBlock*, std::initializer_list<HBasicBlock*>> blks =
        std::nullopt) {
  std::tuple<std::vector<Target*>...> res;
  // One collector lambda per target type, expanded from the parameter pack.
  PatternMatchGraphVisitor vis(
      graph, [&](Target* t) { std::get<std::vector<Target*>>(res).push_back(t); }...);

  if (std::holds_alternative<std::initializer_list<HBasicBlock*>>(blks)) {
    for (HBasicBlock* blk : std::get<std::initializer_list<HBasicBlock*>>(blks)) {
      vis.VisitBasicBlock(blk);
    }
  } else if (std::holds_alternative<std::nullopt_t>(blks)) {
    vis.VisitInsertionOrder();
  } else {
    vis.VisitBasicBlock(std::get<HBasicBlock*>(blks));
  }
  return res;
}
707
// Like FindAllInstructions, but expects at most one instruction of each
// requested type (EXPECT_EQ fails on duplicates); absent types yield nullptr.
// NOTE(review): the blks-dispatch logic duplicates FindAllInstructions --
// keep the two in sync if either changes.
template <typename... Target>
std::tuple<Target*...> FindSingleInstructions(
    HGraph* graph,
    std::variant<std::nullopt_t, HBasicBlock*, std::initializer_list<HBasicBlock*>> blks =
        std::nullopt) {
  std::tuple<Target*...> res;
  PatternMatchGraphVisitor vis(graph, [&](Target* t) {
    // Slot must still be empty, i.e. this is the first match for this type.
    EXPECT_EQ(std::get<Target*>(res), nullptr)
        << *std::get<Target*>(res) << " already found but found " << *t << "!";
    std::get<Target*>(res) = t;
  }...);
  if (std::holds_alternative<std::initializer_list<HBasicBlock*>>(blks)) {
    for (HBasicBlock* blk : std::get<std::initializer_list<HBasicBlock*>>(blks)) {
      vis.VisitBasicBlock(blk);
    }
  } else if (std::holds_alternative<std::nullopt_t>(blks)) {
    vis.VisitInsertionOrder();
  } else {
    vis.VisitBasicBlock(std::get<HBasicBlock*>(blks));
  }
  return res;
}
730
// Single-type convenience wrapper around FindSingleInstructions: returns the
// unique instruction of type Target, or nullptr if none exists.
template <typename Target>
Target* FindSingleInstruction(
    HGraph* graph,
    std::variant<std::nullopt_t, HBasicBlock*, std::initializer_list<HBasicBlock*>> blks =
        std::nullopt) {
  return std::get<Target*>(FindSingleInstructions<Target>(graph, blks));
}
738
739 } // namespace art
740
741 #endif // ART_COMPILER_OPTIMIZING_OPTIMIZING_UNIT_TEST_H_
742