1 /*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #ifndef ART_COMPILER_OPTIMIZING_OPTIMIZING_UNIT_TEST_H_
18 #define ART_COMPILER_OPTIMIZING_OPTIMIZING_UNIT_TEST_H_
19
20 #include <memory>
21 #include <ostream>
22 #include <string_view>
23 #include <string>
24 #include <tuple>
25 #include <vector>
26 #include <variant>
27
28 #include "base/macros.h"
29 #include "base/indenter.h"
30 #include "base/malloc_arena_pool.h"
31 #include "base/scoped_arena_allocator.h"
32 #include "builder.h"
33 #include "common_compiler_test.h"
34 #include "dex/code_item_accessors-inl.h"
35 #include "dex/dex_file.h"
36 #include "dex/dex_instruction.h"
37 #include "dex/standard_dex_file.h"
38 #include "driver/dex_compilation_unit.h"
39 #include "graph_checker.h"
40 #include "gtest/gtest.h"
41 #include "handle_scope-inl.h"
42 #include "handle_scope.h"
43 #include "mirror/class_loader.h"
44 #include "mirror/dex_cache.h"
45 #include "nodes.h"
46 #include "scoped_thread_state_change.h"
47 #include "ssa_builder.h"
48 #include "ssa_liveness_analysis.h"
49
50 namespace art HIDDEN {
51
// Counts the number of 16-bit code units passed as varargs.
#define NUM_INSTRUCTIONS(...) \
  (sizeof((uint16_t[]) {__VA_ARGS__}) /sizeof(uint16_t))

// Initializer for a fake dex code item: NUM_REGS followed by zeroed code-item
// header fields, the instruction count, padding, and the instructions themselves.
#define N_REGISTERS_CODE_ITEM(NUM_REGS, ...)                            \
    { NUM_REGS, 0, 0, 0, 0, 0, NUM_INSTRUCTIONS(__VA_ARGS__), 0, __VA_ARGS__ }

// Convenience wrappers for the common register counts used in tests.
#define ZERO_REGISTER_CODE_ITEM(...)   N_REGISTERS_CODE_ITEM(0, __VA_ARGS__)
#define ONE_REGISTER_CODE_ITEM(...)    N_REGISTERS_CODE_ITEM(1, __VA_ARGS__)
#define TWO_REGISTERS_CODE_ITEM(...)   N_REGISTERS_CODE_ITEM(2, __VA_ARGS__)
#define THREE_REGISTERS_CODE_ITEM(...) N_REGISTERS_CODE_ITEM(3, __VA_ARGS__)
#define FOUR_REGISTERS_CODE_ITEM(...)  N_REGISTERS_CODE_ITEM(4, __VA_ARGS__)
#define FIVE_REGISTERS_CODE_ITEM(...)  N_REGISTERS_CODE_ITEM(5, __VA_ARGS__)
#define SIX_REGISTERS_CODE_ITEM(...)   N_REGISTERS_CODE_ITEM(6, __VA_ARGS__)
65
// Thin wrapper used by the EXPECT_INS_* / ASSERT_INS_* macros below so that a
// failing comparison prints a full instruction dump rather than a raw pointer.
struct InstructionDumper {
 public:
  HInstruction* ins_;  // May be null; printed as "NULL".
};
70
71 inline bool operator==(const InstructionDumper& a, const InstructionDumper& b) {
72 return a.ins_ == b.ins_;
73 }
74 inline bool operator!=(const InstructionDumper& a, const InstructionDumper& b) {
75 return !(a == b);
76 }
77
78 inline std::ostream& operator<<(std::ostream& os, const InstructionDumper& id) {
79 if (id.ins_ == nullptr) {
80 return os << "NULL";
81 } else {
82 return os << "(" << id.ins_ << "): " << id.ins_->DumpWithArgs();
83 }
84 }
85
// Instruction-aware gtest comparisons: on failure these print the full
// instruction dump (via InstructionDumper) instead of an opaque pointer value.
#define EXPECT_INS_EQ(a, b) EXPECT_EQ(InstructionDumper{a}, InstructionDumper{b})
#define EXPECT_INS_REMOVED(a) EXPECT_TRUE(IsRemoved(a)) << "Not removed: " << (InstructionDumper{a})
#define EXPECT_INS_RETAINED(a) EXPECT_FALSE(IsRemoved(a)) << "Removed: " << (InstructionDumper{a})
#define ASSERT_INS_EQ(a, b) ASSERT_EQ(InstructionDumper{a}, InstructionDumper{b})
#define ASSERT_INS_REMOVED(a) ASSERT_TRUE(IsRemoved(a)) << "Not removed: " << (InstructionDumper{a})
#define ASSERT_INS_RETAINED(a) ASSERT_FALSE(IsRemoved(a)) << "Removed: " << (InstructionDumper{a})
92
93 inline LiveInterval* BuildInterval(const size_t ranges[][2],
94 size_t number_of_ranges,
95 ScopedArenaAllocator* allocator,
96 int reg = -1,
97 HInstruction* defined_by = nullptr) {
98 LiveInterval* interval =
99 LiveInterval::MakeInterval(allocator, DataType::Type::kInt32, defined_by);
100 if (defined_by != nullptr) {
101 defined_by->SetLiveInterval(interval);
102 }
103 for (size_t i = number_of_ranges; i > 0; --i) {
104 interval->AddRange(ranges[i - 1][0], ranges[i - 1][1]);
105 }
106 interval->SetRegister(reg);
107 return interval;
108 }
109
RemoveSuspendChecks(HGraph * graph)110 inline void RemoveSuspendChecks(HGraph* graph) {
111 for (HBasicBlock* block : graph->GetBlocks()) {
112 if (block != nullptr) {
113 if (block->GetLoopInformation() != nullptr) {
114 block->GetLoopInformation()->SetSuspendCheck(nullptr);
115 }
116 for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
117 HInstruction* current = it.Current();
118 if (current->IsSuspendCheck()) {
119 current->GetBlock()->RemoveInstruction(current);
120 }
121 }
122 }
123 }
124 }
125
126 class ArenaPoolAndAllocator {
127 public:
ArenaPoolAndAllocator()128 ArenaPoolAndAllocator()
129 : pool_(), allocator_(&pool_), arena_stack_(&pool_), scoped_allocator_(&arena_stack_) { }
130
GetAllocator()131 ArenaAllocator* GetAllocator() { return &allocator_; }
GetArenaStack()132 ArenaStack* GetArenaStack() { return &arena_stack_; }
GetScopedAllocator()133 ScopedArenaAllocator* GetScopedAllocator() { return &scoped_allocator_; }
134
135 private:
136 MallocArenaPool pool_;
137 ArenaAllocator allocator_;
138 ArenaStack arena_stack_;
139 ScopedArenaAllocator scoped_allocator_;
140 };
141
142 class AdjacencyListGraph {
143 public:
144 using Edge = std::pair<const std::string_view, const std::string_view>;
AdjacencyListGraph(HGraph * graph,ArenaAllocator * alloc,const std::string_view entry_name,const std::string_view exit_name,const std::vector<Edge> & adj)145 AdjacencyListGraph(
146 HGraph* graph,
147 ArenaAllocator* alloc,
148 const std::string_view entry_name,
149 const std::string_view exit_name,
150 const std::vector<Edge>& adj) : graph_(graph) {
151 auto create_block = [&]() {
152 HBasicBlock* blk = new (alloc) HBasicBlock(graph_);
153 graph_->AddBlock(blk);
154 return blk;
155 };
156 HBasicBlock* entry = create_block();
157 HBasicBlock* exit = create_block();
158 graph_->SetEntryBlock(entry);
159 graph_->SetExitBlock(exit);
160 name_to_block_.Put(entry_name, entry);
161 name_to_block_.Put(exit_name, exit);
162 for (const auto& [src, dest] : adj) {
163 HBasicBlock* src_blk = name_to_block_.GetOrCreate(src, create_block);
164 HBasicBlock* dest_blk = name_to_block_.GetOrCreate(dest, create_block);
165 src_blk->AddSuccessor(dest_blk);
166 }
167 graph_->ComputeDominanceInformation();
168 for (auto [name, blk] : name_to_block_) {
169 block_to_name_.Put(blk, name);
170 }
171 }
172
HasBlock(const HBasicBlock * blk)173 bool HasBlock(const HBasicBlock* blk) const {
174 return block_to_name_.find(blk) != block_to_name_.end();
175 }
176
GetName(const HBasicBlock * blk)177 std::string_view GetName(const HBasicBlock* blk) const {
178 return block_to_name_.Get(blk);
179 }
180
Get(const std::string_view & sv)181 HBasicBlock* Get(const std::string_view& sv) const {
182 return name_to_block_.Get(sv);
183 }
184
185 AdjacencyListGraph(AdjacencyListGraph&&) = default;
186 AdjacencyListGraph(const AdjacencyListGraph&) = default;
187 AdjacencyListGraph& operator=(AdjacencyListGraph&&) = default;
188 AdjacencyListGraph& operator=(const AdjacencyListGraph&) = default;
189
Dump(std::ostream & os)190 std::ostream& Dump(std::ostream& os) const {
191 struct Namer : public BlockNamer {
192 public:
193 explicit Namer(const AdjacencyListGraph& alg) : BlockNamer(), alg_(alg) {}
194 std::ostream& PrintName(std::ostream& os, HBasicBlock* blk) const override {
195 if (alg_.HasBlock(blk)) {
196 return os << alg_.GetName(blk) << " (" << blk->GetBlockId() << ")";
197 } else {
198 return os << "<Unnamed B" << blk->GetBlockId() << ">";
199 }
200 }
201
202 const AdjacencyListGraph& alg_;
203 };
204 Namer namer(*this);
205 return graph_->Dump(os, /* codegen_= */ nullptr, namer);
206 }
207
208 private:
209 HGraph* graph_;
210 SafeMap<const std::string_view, HBasicBlock*> name_to_block_;
211 SafeMap<const HBasicBlock*, const std::string_view> block_to_name_;
212 };
213
214 // Have a separate helper so the OptimizingCFITest can inherit it without causing
215 // multiple inheritance errors from having two gtest as a parent twice.
216 class OptimizingUnitTestHelper {
217 public:
OptimizingUnitTestHelper()218 OptimizingUnitTestHelper()
219 : pool_and_allocator_(new ArenaPoolAndAllocator()),
220 graph_(nullptr),
221 entry_block_(nullptr),
222 return_block_(nullptr),
223 exit_block_(nullptr) { }
224
GetAllocator()225 ArenaAllocator* GetAllocator() { return pool_and_allocator_->GetAllocator(); }
GetArenaStack()226 ArenaStack* GetArenaStack() { return pool_and_allocator_->GetArenaStack(); }
GetScopedAllocator()227 ScopedArenaAllocator* GetScopedAllocator() { return pool_and_allocator_->GetScopedAllocator(); }
228
ResetPoolAndAllocator()229 void ResetPoolAndAllocator() {
230 pool_and_allocator_.reset(new ArenaPoolAndAllocator());
231 }
232
233 HGraph* CreateGraph(VariableSizedHandleScope* handles = nullptr) {
234 ArenaAllocator* const allocator = pool_and_allocator_->GetAllocator();
235
236 // Reserve a big array of 0s so the dex file constructor can offsets from the header.
237 static constexpr size_t kDexDataSize = 4 * KB;
238 const uint8_t* dex_data = reinterpret_cast<uint8_t*>(allocator->Alloc(kDexDataSize));
239
240 // Create the dex file based on the fake data. Call the constructor so that we can use virtual
241 // functions. Don't use the arena for the StandardDexFile otherwise the dex location leaks.
242 auto container =
243 std::make_shared<MemoryDexFileContainer>(dex_data, sizeof(StandardDexFile::Header));
244 dex_files_.emplace_back(new StandardDexFile(dex_data,
245 "no_location",
246 /*location_checksum*/ 0,
247 /*oat_dex_file*/ nullptr,
248 std::move(container)));
249
250 graph_ = new (allocator) HGraph(
251 allocator,
252 pool_and_allocator_->GetArenaStack(),
253 handles,
254 *dex_files_.back(),
255 /*method_idx*/-1,
256 kRuntimeISA);
257 return graph_;
258 }
259
260 // Create a control-flow graph from Dex instructions.
261 HGraph* CreateCFG(const std::vector<uint16_t>& data,
262 DataType::Type return_type = DataType::Type::kInt32) {
263 ScopedObjectAccess soa(Thread::Current());
264 VariableSizedHandleScope handles(soa.Self());
265 HGraph* graph = CreateGraph(&handles);
266
267 // The code item data might not aligned to 4 bytes, copy it to ensure that.
268 const size_t code_item_size = data.size() * sizeof(data.front());
269 void* aligned_data = GetAllocator()->Alloc(code_item_size);
270 memcpy(aligned_data, &data[0], code_item_size);
271 CHECK_ALIGNED(aligned_data, StandardDexFile::CodeItem::kAlignment);
272 const dex::CodeItem* code_item = reinterpret_cast<const dex::CodeItem*>(aligned_data);
273
274 {
275 const DexCompilationUnit* dex_compilation_unit =
276 new (graph->GetAllocator()) DexCompilationUnit(
277 /* class_loader= */ Handle<mirror::ClassLoader>(), // Invalid handle.
278 /* class_linker= */ nullptr,
279 graph->GetDexFile(),
280 code_item,
281 /* class_def_idx= */ DexFile::kDexNoIndex16,
282 /* method_idx= */ dex::kDexNoIndex,
283 /* access_flags= */ 0u,
284 /* verified_method= */ nullptr,
285 /* dex_cache= */ Handle<mirror::DexCache>()); // Invalid handle.
286 CodeItemDebugInfoAccessor accessor(graph->GetDexFile(), code_item, /*dex_method_idx*/ 0u);
287 HGraphBuilder builder(graph, dex_compilation_unit, accessor, return_type);
288 bool graph_built = (builder.BuildGraph() == kAnalysisSuccess);
289 return graph_built ? graph : nullptr;
290 }
291 }
292
293 void InitGraph(VariableSizedHandleScope* handles = nullptr) {
294 CreateGraph(handles);
295 entry_block_ = AddNewBlock();
296 return_block_ = AddNewBlock();
297 exit_block_ = AddNewBlock();
298
299 graph_->SetEntryBlock(entry_block_);
300 graph_->SetExitBlock(exit_block_);
301
302 entry_block_->AddSuccessor(return_block_);
303 return_block_->AddSuccessor(exit_block_);
304
305 return_block_->AddInstruction(new (GetAllocator()) HReturnVoid());
306 exit_block_->AddInstruction(new (GetAllocator()) HExit());
307 }
308
AddParameter(HInstruction * parameter)309 void AddParameter(HInstruction* parameter) {
310 entry_block_->AddInstruction(parameter);
311 parameters_.push_back(parameter);
312 }
313
AddNewBlock()314 HBasicBlock* AddNewBlock() {
315 HBasicBlock* block = new (GetAllocator()) HBasicBlock(graph_);
316 graph_->AddBlock(block);
317 return block;
318 }
319
320 // Run GraphChecker with all checks.
321 //
322 // Return: the status whether the run is successful.
323 bool CheckGraph(std::ostream& oss = std::cerr) {
324 return CheckGraph(graph_, oss);
325 }
326
ManuallyBuildEnvFor(HInstruction * instruction,ArenaVector<HInstruction * > * current_locals)327 HEnvironment* ManuallyBuildEnvFor(HInstruction* instruction,
328 ArenaVector<HInstruction*>* current_locals) {
329 HEnvironment* environment = new (GetAllocator()) HEnvironment(
330 (GetAllocator()),
331 current_locals->size(),
332 graph_->GetArtMethod(),
333 instruction->GetDexPc(),
334 instruction);
335
336 environment->CopyFrom(ArrayRef<HInstruction* const>(*current_locals));
337 instruction->SetRawEnvironment(environment);
338 return environment;
339 }
340
EnsurePredecessorOrder(HBasicBlock * target,std::initializer_list<HBasicBlock * > preds)341 void EnsurePredecessorOrder(HBasicBlock* target, std::initializer_list<HBasicBlock*> preds) {
342 // Make sure the given preds and block predecessors have the same blocks.
343 BitVector bv(preds.size(), false, Allocator::GetCallocAllocator());
344 auto preds_and_idx = ZipCount(MakeIterationRange(target->GetPredecessors()));
345 bool correct_preds = preds.size() == target->GetPredecessors().size() &&
346 std::all_of(preds.begin(), preds.end(), [&](HBasicBlock* pred) {
347 return std::any_of(preds_and_idx.begin(),
348 preds_and_idx.end(),
349 // Make sure every target predecessor is used only
350 // once.
351 [&](std::pair<HBasicBlock*, uint32_t> cur) {
352 if (cur.first == pred && !bv.IsBitSet(cur.second)) {
353 bv.SetBit(cur.second);
354 return true;
355 } else {
356 return false;
357 }
358 });
359 }) &&
360 bv.NumSetBits() == preds.size();
361 auto dump_list = [](auto it) {
362 std::ostringstream oss;
363 oss << "[";
364 bool first = true;
365 for (HBasicBlock* b : it) {
366 if (!first) {
367 oss << ", ";
368 }
369 first = false;
370 oss << b->GetBlockId();
371 }
372 oss << "]";
373 return oss.str();
374 };
375 ASSERT_TRUE(correct_preds) << "Predecessors of " << target->GetBlockId() << " are "
376 << dump_list(target->GetPredecessors()) << " not "
377 << dump_list(preds);
378 if (correct_preds) {
379 std::copy(preds.begin(), preds.end(), target->predecessors_.begin());
380 }
381 }
382
SetupFromAdjacencyList(const std::string_view entry_name,const std::string_view exit_name,const std::vector<AdjacencyListGraph::Edge> & adj)383 AdjacencyListGraph SetupFromAdjacencyList(const std::string_view entry_name,
384 const std::string_view exit_name,
385 const std::vector<AdjacencyListGraph::Edge>& adj) {
386 return AdjacencyListGraph(graph_, GetAllocator(), entry_name, exit_name, adj);
387 }
388
ManuallyBuildEnvFor(HInstruction * ins,const std::initializer_list<HInstruction * > & env)389 void ManuallyBuildEnvFor(HInstruction* ins, const std::initializer_list<HInstruction*>& env) {
390 ArenaVector<HInstruction*> current_locals(env, GetAllocator()->Adapter(kArenaAllocInstruction));
391 OptimizingUnitTestHelper::ManuallyBuildEnvFor(ins, ¤t_locals);
392 }
393
394 HLoadClass* MakeClassLoad(std::optional<dex::TypeIndex> ti = std::nullopt,
395 std::optional<Handle<mirror::Class>> klass = std::nullopt) {
396 return new (GetAllocator()) HLoadClass(graph_->GetCurrentMethod(),
397 ti ? *ti : dex::TypeIndex(class_idx_++),
398 graph_->GetDexFile(),
399 /* klass= */ klass ? *klass : null_klass_,
400 /* is_referrers_class= */ false,
401 /* dex_pc= */ 0,
402 /* needs_access_check= */ false);
403 }
404
405 HNewInstance* MakeNewInstance(HInstruction* cls, uint32_t dex_pc = 0u) {
406 EXPECT_TRUE(cls->IsLoadClass() || cls->IsClinitCheck()) << *cls;
407 HLoadClass* load =
408 cls->IsLoadClass() ? cls->AsLoadClass() : cls->AsClinitCheck()->GetLoadClass();
409 return new (GetAllocator()) HNewInstance(cls,
410 dex_pc,
411 load->GetTypeIndex(),
412 graph_->GetDexFile(),
413 /* finalizable= */ false,
414 QuickEntrypointEnum::kQuickAllocObjectInitialized);
415 }
416
417 HInstanceFieldSet* MakeIFieldSet(HInstruction* inst,
418 HInstruction* data,
419 MemberOffset off,
420 uint32_t dex_pc = 0u) {
421 return new (GetAllocator()) HInstanceFieldSet(inst,
422 data,
423 /* field= */ nullptr,
424 /* field_type= */ data->GetType(),
425 /* field_offset= */ off,
426 /* is_volatile= */ false,
427 /* field_idx= */ 0,
428 /* declaring_class_def_index= */ 0,
429 graph_->GetDexFile(),
430 dex_pc);
431 }
432
433 HInstanceFieldGet* MakeIFieldGet(HInstruction* inst,
434 DataType::Type type,
435 MemberOffset off,
436 uint32_t dex_pc = 0u) {
437 return new (GetAllocator()) HInstanceFieldGet(inst,
438 /* field= */ nullptr,
439 /* field_type= */ type,
440 /* field_offset= */ off,
441 /* is_volatile= */ false,
442 /* field_idx= */ 0,
443 /* declaring_class_def_index= */ 0,
444 graph_->GetDexFile(),
445 dex_pc);
446 }
447
MakeInvoke(DataType::Type return_type,const std::vector<HInstruction * > & args)448 HInvokeStaticOrDirect* MakeInvoke(DataType::Type return_type,
449 const std::vector<HInstruction*>& args) {
450 MethodReference method_reference{/* file= */ &graph_->GetDexFile(), /* index= */ method_idx_++};
451 HInvokeStaticOrDirect* res = new (GetAllocator())
452 HInvokeStaticOrDirect(GetAllocator(),
453 args.size(),
454 return_type,
455 /* dex_pc= */ 0,
456 method_reference,
457 /* resolved_method= */ nullptr,
458 HInvokeStaticOrDirect::DispatchInfo{},
459 InvokeType::kStatic,
460 /* resolved_method_reference= */ method_reference,
461 HInvokeStaticOrDirect::ClinitCheckRequirement::kNone,
462 !graph_->IsDebuggable());
463 for (auto [ins, idx] : ZipCount(MakeIterationRange(args))) {
464 res->SetRawInputAt(idx, ins);
465 }
466 return res;
467 }
468
MakePhi(const std::vector<HInstruction * > & ins)469 HPhi* MakePhi(const std::vector<HInstruction*>& ins) {
470 EXPECT_GE(ins.size(), 2u) << "Phi requires at least 2 inputs";
471 HPhi* phi =
472 new (GetAllocator()) HPhi(GetAllocator(), kNoRegNumber, ins.size(), ins[0]->GetType());
473 for (auto [i, idx] : ZipCount(MakeIterationRange(ins))) {
474 phi->SetRawInputAt(idx, i);
475 }
476 return phi;
477 }
478
SetupExit(HBasicBlock * exit)479 void SetupExit(HBasicBlock* exit) {
480 exit->AddInstruction(new (GetAllocator()) HExit());
481 }
482
DefaultTypeIndexForType(DataType::Type type)483 dex::TypeIndex DefaultTypeIndexForType(DataType::Type type) {
484 switch (type) {
485 case DataType::Type::kBool:
486 return dex::TypeIndex(1);
487 case DataType::Type::kUint8:
488 case DataType::Type::kInt8:
489 return dex::TypeIndex(2);
490 case DataType::Type::kUint16:
491 case DataType::Type::kInt16:
492 return dex::TypeIndex(3);
493 case DataType::Type::kUint32:
494 case DataType::Type::kInt32:
495 return dex::TypeIndex(4);
496 case DataType::Type::kUint64:
497 case DataType::Type::kInt64:
498 return dex::TypeIndex(5);
499 case DataType::Type::kReference:
500 return dex::TypeIndex(6);
501 case DataType::Type::kFloat32:
502 return dex::TypeIndex(7);
503 case DataType::Type::kFloat64:
504 return dex::TypeIndex(8);
505 case DataType::Type::kVoid:
506 EXPECT_TRUE(false) << "No type for void!";
507 return dex::TypeIndex(1000);
508 }
509 }
510
511 // Creates a parameter. The instruction is automatically added to the entry-block
512 HParameterValue* MakeParam(DataType::Type type, std::optional<dex::TypeIndex> ti = std::nullopt) {
513 HParameterValue* val = new (GetAllocator()) HParameterValue(
514 graph_->GetDexFile(), ti ? *ti : DefaultTypeIndexForType(type), param_count_++, type);
515 graph_->GetEntryBlock()->AddInstruction(val);
516 return val;
517 }
518
519 protected:
CheckGraph(HGraph * graph,std::ostream & oss)520 bool CheckGraph(HGraph* graph, std::ostream& oss) {
521 GraphChecker checker(graph);
522 checker.Run();
523 checker.Dump(oss);
524 return checker.IsValid();
525 }
526
527 std::vector<std::unique_ptr<const StandardDexFile>> dex_files_;
528 std::unique_ptr<ArenaPoolAndAllocator> pool_and_allocator_;
529
530 HGraph* graph_;
531 HBasicBlock* entry_block_;
532 HBasicBlock* return_block_;
533 HBasicBlock* exit_block_;
534
535 std::vector<HInstruction*> parameters_;
536
537 size_t param_count_ = 0;
538 size_t class_idx_ = 42;
539 uint32_t method_idx_ = 100;
540
541 ScopedNullHandle<mirror::Class> null_klass_;
542 };
543
// Standard fixture for optimizing-compiler gtests: common ART test setup plus
// the graph-building helpers from OptimizingUnitTestHelper.
class OptimizingUnitTest : public CommonArtTest, public OptimizingUnitTestHelper {};
545
// Naive string diff data type: an ordered list of (needle, replacement) pairs
// consumed by Patch() below.
using diff_t = std::list<std::pair<std::string, std::string>>;

// An alias for the empty string used to make it clear that a line is
// removed in a diff.
static const std::string removed = "";  // NOLINT [runtime/string] [4]
552
553 // Naive patch command: apply a diff to a string.
Patch(const std::string & original,const diff_t & diff)554 inline std::string Patch(const std::string& original, const diff_t& diff) {
555 std::string result = original;
556 for (const auto& p : diff) {
557 std::string::size_type pos = result.find(p.first);
558 DCHECK_NE(pos, std::string::npos)
559 << "Could not find: \"" << p.first << "\" in \"" << result << "\"";
560 result.replace(pos, p.first.size(), p.second);
561 }
562 return result;
563 }
564
565 // Returns if the instruction is removed from the graph.
IsRemoved(HInstruction * instruction)566 inline bool IsRemoved(HInstruction* instruction) {
567 return instruction->GetBlock() == nullptr;
568 }
569
// Allow streaming an AdjacencyListGraph directly into failure messages.
inline std::ostream& operator<<(std::ostream& oss, const AdjacencyListGraph& alg) {
  return alg.Dump(oss);
}
573
574 class PatternMatchGraphVisitor final : public HGraphVisitor {
575 private:
576 struct HandlerWrapper {
577 public:
~HandlerWrapperHandlerWrapper578 virtual ~HandlerWrapper() {}
579 virtual void operator()(HInstruction* h) = 0;
580 };
581
582 template <HInstruction::InstructionKind kKind, typename F>
583 struct KindWrapper;
584
585 #define GEN_HANDLER(nm, unused) \
586 template <typename F> \
587 struct KindWrapper<HInstruction::InstructionKind::k##nm, F> : public HandlerWrapper { \
588 public: \
589 explicit KindWrapper(F f) : f_(f) {} \
590 void operator()(HInstruction* h) override { \
591 if constexpr (std::is_invocable_v<F, H##nm*>) { \
592 f_(h->As##nm()); \
593 } else { \
594 LOG(FATAL) << "Incorrect call with " << #nm; \
595 } \
596 } \
597 \
598 private: \
599 F f_; \
600 };
601
FOR_EACH_CONCRETE_INSTRUCTION(GEN_HANDLER)602 FOR_EACH_CONCRETE_INSTRUCTION(GEN_HANDLER)
603 #undef GEN_HANDLER
604
605 template <typename F>
606 std::unique_ptr<HandlerWrapper> GetWrapper(HInstruction::InstructionKind kind, F f) {
607 switch (kind) {
608 #define GEN_GETTER(nm, unused) \
609 case HInstruction::InstructionKind::k##nm: \
610 return std::unique_ptr<HandlerWrapper>( \
611 new KindWrapper<HInstruction::InstructionKind::k##nm, F>(f));
612 FOR_EACH_CONCRETE_INSTRUCTION(GEN_GETTER)
613 #undef GEN_GETTER
614 default:
615 LOG(FATAL) << "Unable to handle kind " << kind;
616 return nullptr;
617 }
618 }
619
620 public:
621 template <typename... Inst>
PatternMatchGraphVisitor(HGraph * graph,Inst...handlers)622 explicit PatternMatchGraphVisitor(HGraph* graph, Inst... handlers) : HGraphVisitor(graph) {
623 FillHandlers(handlers...);
624 }
625
VisitInstruction(HInstruction * instruction)626 void VisitInstruction(HInstruction* instruction) override {
627 auto& h = handlers_[instruction->GetKind()];
628 if (h.get() != nullptr) {
629 (*h)(instruction);
630 }
631 }
632
633 private:
634 template <typename Func>
GetKind()635 constexpr HInstruction::InstructionKind GetKind() {
636 #define CHECK_INST(nm, unused) \
637 if constexpr (std::is_invocable_v<Func, H##nm*>) { \
638 return HInstruction::InstructionKind::k##nm; \
639 }
640 FOR_EACH_CONCRETE_INSTRUCTION(CHECK_INST);
641 #undef CHECK_INST
642 static_assert(!std::is_invocable_v<Func, HInstruction*>,
643 "Use on generic HInstruction not allowed");
644 #define STATIC_ASSERT_ABSTRACT(nm, unused) && !std::is_invocable_v<Func, H##nm*>
645 static_assert(true FOR_EACH_ABSTRACT_INSTRUCTION(STATIC_ASSERT_ABSTRACT),
646 "Must not be abstract instruction");
647 #undef STATIC_ASSERT_ABSTRACT
648 #define STATIC_ASSERT_CONCRETE(nm, unused) || std::is_invocable_v<Func, H##nm*>
649 static_assert(false FOR_EACH_CONCRETE_INSTRUCTION(STATIC_ASSERT_CONCRETE),
650 "Must be a concrete instruction");
651 #undef STATIC_ASSERT_CONCRETE
652 return HInstruction::InstructionKind::kLastInstructionKind;
653 }
654 template <typename First>
FillHandlers(First h1)655 void FillHandlers(First h1) {
656 HInstruction::InstructionKind type = GetKind<First>();
657 CHECK_NE(type, HInstruction::kLastInstructionKind)
658 << "Unknown instruction kind. Only concrete ones please.";
659 handlers_[type] = GetWrapper(type, h1);
660 }
661
662 template <typename First, typename... Inst>
FillHandlers(First h1,Inst...handlers)663 void FillHandlers(First h1, Inst... handlers) {
664 FillHandlers(h1);
665 FillHandlers<Inst...>(handlers...);
666 }
667
668 std::array<std::unique_ptr<HandlerWrapper>, HInstruction::InstructionKind::kLastInstructionKind>
669 handlers_;
670 };
671
672 template <typename... Target>
673 std::tuple<std::vector<Target*>...> FindAllInstructions(
674 HGraph* graph,
675 std::variant<std::nullopt_t, HBasicBlock*, std::initializer_list<HBasicBlock*>> blks =
676 std::nullopt) {
677 std::tuple<std::vector<Target*>...> res;
678 PatternMatchGraphVisitor vis(
679 graph, [&](Target* t) { std::get<std::vector<Target*>>(res).push_back(t); }...);
680
681 if (std::holds_alternative<std::initializer_list<HBasicBlock*>>(blks)) {
682 for (HBasicBlock* blk : std::get<std::initializer_list<HBasicBlock*>>(blks)) {
683 vis.VisitBasicBlock(blk);
684 }
685 } else if (std::holds_alternative<std::nullopt_t>(blks)) {
686 vis.VisitInsertionOrder();
687 } else {
688 vis.VisitBasicBlock(std::get<HBasicBlock*>(blks));
689 }
690 return res;
691 }
692
693 template <typename... Target>
694 std::tuple<Target*...> FindSingleInstructions(
695 HGraph* graph,
696 std::variant<std::nullopt_t, HBasicBlock*, std::initializer_list<HBasicBlock*>> blks =
697 std::nullopt) {
698 std::tuple<Target*...> res;
699 PatternMatchGraphVisitor vis(graph, [&](Target* t) {
700 EXPECT_EQ(std::get<Target*>(res), nullptr)
701 << *std::get<Target*>(res) << " already found but found " << *t << "!";
702 std::get<Target*>(res) = t;
703 }...);
704 if (std::holds_alternative<std::initializer_list<HBasicBlock*>>(blks)) {
705 for (HBasicBlock* blk : std::get<std::initializer_list<HBasicBlock*>>(blks)) {
706 vis.VisitBasicBlock(blk);
707 }
708 } else if (std::holds_alternative<std::nullopt_t>(blks)) {
709 vis.VisitInsertionOrder();
710 } else {
711 vis.VisitBasicBlock(std::get<HBasicBlock*>(blks));
712 }
713 return res;
714 }
715
// Convenience wrapper around FindSingleInstructions for a single target type.
// Returns the unique matching instruction, or null if none was found.
template <typename Target>
Target* FindSingleInstruction(
    HGraph* graph,
    std::variant<std::nullopt_t, HBasicBlock*, std::initializer_list<HBasicBlock*>> blks =
        std::nullopt) {
  return std::get<Target*>(FindSingleInstructions<Target>(graph, blks));
}
723
724 } // namespace art
725
726 #endif // ART_COMPILER_OPTIMIZING_OPTIMIZING_UNIT_TEST_H_
727