1 /*
2 * Copyright (C) 2016 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "instruction_builder.h"
18
19 #include "art_method-inl.h"
20 #include "base/arena_bit_vector.h"
21 #include "base/bit_vector-inl.h"
22 #include "base/logging.h"
23 #include "block_builder.h"
24 #include "class_linker-inl.h"
25 #include "code_generator.h"
26 #include "data_type-inl.h"
27 #include "dex/bytecode_utils.h"
28 #include "dex/dex_instruction-inl.h"
29 #include "driver/dex_compilation_unit.h"
30 #include "driver/compiler_options.h"
31 #include "entrypoints/entrypoint_utils-inl.h"
32 #include "imtable-inl.h"
33 #include "intrinsics.h"
34 #include "intrinsics_utils.h"
35 #include "jit/jit.h"
36 #include "jit/profiling_info.h"
37 #include "mirror/dex_cache.h"
38 #include "oat/oat_file.h"
39 #include "optimizing_compiler_stats.h"
40 #include "reflective_handle_scope-inl.h"
41 #include "scoped_thread_state_change-inl.h"
42 #include "sharpening.h"
43 #include "ssa_builder.h"
44 #include "well_known_classes.h"
45
46 namespace art HIDDEN {
47
48 namespace {
49
50 class SamePackageCompare {
51 public:
SamePackageCompare(const DexCompilationUnit & dex_compilation_unit)52 explicit SamePackageCompare(const DexCompilationUnit& dex_compilation_unit)
53 : dex_compilation_unit_(dex_compilation_unit) {}
54
operator ()(ObjPtr<mirror::Class> klass)55 bool operator()(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
56 if (klass->GetClassLoader() != dex_compilation_unit_.GetClassLoader().Get()) {
57 return false;
58 }
59 if (referrers_descriptor_ == nullptr) {
60 const DexFile* dex_file = dex_compilation_unit_.GetDexFile();
61 uint32_t referrers_method_idx = dex_compilation_unit_.GetDexMethodIndex();
62 referrers_descriptor_ =
63 dex_file->GetMethodDeclaringClassDescriptor(dex_file->GetMethodId(referrers_method_idx));
64 referrers_package_length_ = PackageLength(referrers_descriptor_);
65 }
66 std::string temp;
67 const char* klass_descriptor = klass->GetDescriptor(&temp);
68 size_t klass_package_length = PackageLength(klass_descriptor);
69 return (referrers_package_length_ == klass_package_length) &&
70 memcmp(referrers_descriptor_, klass_descriptor, referrers_package_length_) == 0;
71 };
72
73 private:
PackageLength(const char * descriptor)74 static size_t PackageLength(const char* descriptor) {
75 const char* slash_pos = strrchr(descriptor, '/');
76 return (slash_pos != nullptr) ? static_cast<size_t>(slash_pos - descriptor) : 0u;
77 }
78
79 const DexCompilationUnit& dex_compilation_unit_;
80 const char* referrers_descriptor_ = nullptr;
81 size_t referrers_package_length_ = 0u;
82 };
83
84 } // anonymous namespace
85
HInstructionBuilder(HGraph * graph,HBasicBlockBuilder * block_builder,SsaBuilder * ssa_builder,const DexFile * dex_file,const CodeItemDebugInfoAccessor & accessor,DataType::Type return_type,const DexCompilationUnit * dex_compilation_unit,const DexCompilationUnit * outer_compilation_unit,CodeGenerator * code_generator,OptimizingCompilerStats * compiler_stats,ScopedArenaAllocator * local_allocator)86 HInstructionBuilder::HInstructionBuilder(HGraph* graph,
87 HBasicBlockBuilder* block_builder,
88 SsaBuilder* ssa_builder,
89 const DexFile* dex_file,
90 const CodeItemDebugInfoAccessor& accessor,
91 DataType::Type return_type,
92 const DexCompilationUnit* dex_compilation_unit,
93 const DexCompilationUnit* outer_compilation_unit,
94 CodeGenerator* code_generator,
95 OptimizingCompilerStats* compiler_stats,
96 ScopedArenaAllocator* local_allocator)
97 : allocator_(graph->GetAllocator()),
98 graph_(graph),
99 dex_file_(dex_file),
100 code_item_accessor_(accessor),
101 return_type_(return_type),
102 block_builder_(block_builder),
103 ssa_builder_(ssa_builder),
104 code_generator_(code_generator),
105 dex_compilation_unit_(dex_compilation_unit),
106 outer_compilation_unit_(outer_compilation_unit),
107 compilation_stats_(compiler_stats),
108 local_allocator_(local_allocator),
109 locals_for_(local_allocator->Adapter(kArenaAllocGraphBuilder)),
110 current_block_(nullptr),
111 current_locals_(nullptr),
112 latest_result_(nullptr),
113 current_this_parameter_(nullptr),
114 loop_headers_(local_allocator->Adapter(kArenaAllocGraphBuilder)),
115 class_cache_(std::less<dex::TypeIndex>(), local_allocator->Adapter(kArenaAllocGraphBuilder)) {
116 loop_headers_.reserve(kDefaultNumberOfLoops);
117 }
118
FindBlockStartingAt(uint32_t dex_pc) const119 HBasicBlock* HInstructionBuilder::FindBlockStartingAt(uint32_t dex_pc) const {
120 return block_builder_->GetBlockAt(dex_pc);
121 }
122
GetLocalsFor(HBasicBlock * block)123 inline ScopedArenaVector<HInstruction*>* HInstructionBuilder::GetLocalsFor(HBasicBlock* block) {
124 ScopedArenaVector<HInstruction*>* locals = &locals_for_[block->GetBlockId()];
125 const size_t vregs = graph_->GetNumberOfVRegs();
126 if (locals->size() == vregs) {
127 return locals;
128 }
129 return GetLocalsForWithAllocation(block, locals, vregs);
130 }
131
GetLocalsForWithAllocation(HBasicBlock * block,ScopedArenaVector<HInstruction * > * locals,const size_t vregs)132 ScopedArenaVector<HInstruction*>* HInstructionBuilder::GetLocalsForWithAllocation(
133 HBasicBlock* block,
134 ScopedArenaVector<HInstruction*>* locals,
135 const size_t vregs) {
136 DCHECK_NE(locals->size(), vregs);
137 locals->resize(vregs, nullptr);
138 if (block->IsCatchBlock()) {
139 // We record incoming inputs of catch phis at throwing instructions and
140 // must therefore eagerly create the phis. Phis for undefined vregs will
141 // be deleted when the first throwing instruction with the vreg undefined
142 // is encountered. Unused phis will be removed by dead phi analysis.
143 for (size_t i = 0; i < vregs; ++i) {
144 // No point in creating the catch phi if it is already undefined at
145 // the first throwing instruction.
146 HInstruction* current_local_value = (*current_locals_)[i];
147 if (current_local_value != nullptr) {
148 HPhi* phi = new (allocator_) HPhi(
149 allocator_,
150 i,
151 0,
152 current_local_value->GetType());
153 block->AddPhi(phi);
154 (*locals)[i] = phi;
155 }
156 }
157 }
158 return locals;
159 }
160
ValueOfLocalAt(HBasicBlock * block,size_t local)161 inline HInstruction* HInstructionBuilder::ValueOfLocalAt(HBasicBlock* block, size_t local) {
162 ScopedArenaVector<HInstruction*>* locals = GetLocalsFor(block);
163 return (*locals)[local];
164 }
165
InitializeBlockLocals()166 void HInstructionBuilder::InitializeBlockLocals() {
167 current_locals_ = GetLocalsFor(current_block_);
168
169 if (current_block_->IsCatchBlock()) {
170 // Catch phis were already created and inputs collected from throwing sites.
171 if (kIsDebugBuild) {
172 // Make sure there was at least one throwing instruction which initialized
173 // locals (guaranteed by HGraphBuilder) and that all try blocks have been
174 // visited already (from HTryBoundary scoping and reverse post order).
175 bool catch_block_visited = false;
176 for (HBasicBlock* current : graph_->GetReversePostOrder()) {
177 if (current == current_block_) {
178 catch_block_visited = true;
179 } else if (current->IsTryBlock()) {
180 const HTryBoundary& try_entry = current->GetTryCatchInformation()->GetTryEntry();
181 if (try_entry.HasExceptionHandler(*current_block_)) {
182 DCHECK(!catch_block_visited) << "Catch block visited before its try block.";
183 }
184 }
185 }
186 DCHECK_EQ(current_locals_->size(), graph_->GetNumberOfVRegs())
187 << "No instructions throwing into a live catch block.";
188 }
189 } else if (current_block_->IsLoopHeader()) {
190 // If the block is a loop header, we know we only have visited the pre header
191 // because we are visiting in reverse post order. We create phis for all initialized
192 // locals from the pre header. Their inputs will be populated at the end of
193 // the analysis.
194 for (size_t local = 0; local < current_locals_->size(); ++local) {
195 HInstruction* incoming =
196 ValueOfLocalAt(current_block_->GetLoopInformation()->GetPreHeader(), local);
197 if (incoming != nullptr) {
198 HPhi* phi = new (allocator_) HPhi(
199 allocator_,
200 local,
201 0,
202 incoming->GetType());
203 current_block_->AddPhi(phi);
204 (*current_locals_)[local] = phi;
205 }
206 }
207
208 // Save the loop header so that the last phase of the analysis knows which
209 // blocks need to be updated.
210 loop_headers_.push_back(current_block_);
211 } else if (current_block_->GetPredecessors().size() > 0) {
212 // All predecessors have already been visited because we are visiting in reverse post order.
213 // We merge the values of all locals, creating phis if those values differ.
214 for (size_t local = 0; local < current_locals_->size(); ++local) {
215 bool one_predecessor_has_no_value = false;
216 bool is_different = false;
217 HInstruction* value = ValueOfLocalAt(current_block_->GetPredecessors()[0], local);
218
219 for (HBasicBlock* predecessor : current_block_->GetPredecessors()) {
220 HInstruction* current = ValueOfLocalAt(predecessor, local);
221 if (current == nullptr) {
222 one_predecessor_has_no_value = true;
223 break;
224 } else if (current != value) {
225 is_different = true;
226 }
227 }
228
229 if (one_predecessor_has_no_value) {
230 // If one predecessor has no value for this local, we trust the verifier has
231 // successfully checked that there is a store dominating any read after this block.
232 continue;
233 }
234
235 if (is_different) {
236 HInstruction* first_input = ValueOfLocalAt(current_block_->GetPredecessors()[0], local);
237 HPhi* phi = new (allocator_) HPhi(
238 allocator_,
239 local,
240 current_block_->GetPredecessors().size(),
241 first_input->GetType());
242 for (size_t i = 0; i < current_block_->GetPredecessors().size(); i++) {
243 HInstruction* pred_value = ValueOfLocalAt(current_block_->GetPredecessors()[i], local);
244 phi->SetRawInputAt(i, pred_value);
245 }
246 current_block_->AddPhi(phi);
247 value = phi;
248 }
249 (*current_locals_)[local] = value;
250 }
251 }
252 }
253
PropagateLocalsToCatchBlocks()254 void HInstructionBuilder::PropagateLocalsToCatchBlocks() {
255 const HTryBoundary& try_entry = current_block_->GetTryCatchInformation()->GetTryEntry();
256 for (HBasicBlock* catch_block : try_entry.GetExceptionHandlers()) {
257 ScopedArenaVector<HInstruction*>* handler_locals = GetLocalsFor(catch_block);
258 DCHECK_EQ(handler_locals->size(), current_locals_->size());
259 for (size_t vreg = 0, e = current_locals_->size(); vreg < e; ++vreg) {
260 HInstruction* handler_value = (*handler_locals)[vreg];
261 if (handler_value == nullptr) {
262 // Vreg was undefined at a previously encountered throwing instruction
263 // and the catch phi was deleted. Do not record the local value.
264 continue;
265 }
266 DCHECK(handler_value->IsPhi());
267
268 HInstruction* local_value = (*current_locals_)[vreg];
269 if (local_value == nullptr) {
270 // This is the first instruction throwing into `catch_block` where
271 // `vreg` is undefined. Delete the catch phi.
272 catch_block->RemovePhi(handler_value->AsPhi());
273 (*handler_locals)[vreg] = nullptr;
274 } else {
275 // Vreg has been defined at all instructions throwing into `catch_block`
276 // encountered so far. Record the local value in the catch phi.
277 handler_value->AsPhi()->AddInput(local_value);
278 }
279 }
280 }
281 }
282
AppendInstruction(HInstruction * instruction)283 void HInstructionBuilder::AppendInstruction(HInstruction* instruction) {
284 current_block_->AddInstruction(instruction);
285 InitializeInstruction(instruction);
286 }
287
InsertInstructionAtTop(HInstruction * instruction)288 void HInstructionBuilder::InsertInstructionAtTop(HInstruction* instruction) {
289 if (current_block_->GetInstructions().IsEmpty()) {
290 current_block_->AddInstruction(instruction);
291 } else {
292 current_block_->InsertInstructionBefore(instruction, current_block_->GetFirstInstruction());
293 }
294 InitializeInstruction(instruction);
295 }
296
InitializeInstruction(HInstruction * instruction)297 void HInstructionBuilder::InitializeInstruction(HInstruction* instruction) {
298 if (instruction->NeedsEnvironment()) {
299 HEnvironment* environment = new (allocator_) HEnvironment(
300 allocator_,
301 current_locals_->size(),
302 graph_->GetArtMethod(),
303 instruction->GetDexPc(),
304 instruction);
305 environment->CopyFrom(ArrayRef<HInstruction* const>(*current_locals_));
306 instruction->SetRawEnvironment(environment);
307 }
308 }
309
LoadNullCheckedLocal(uint32_t register_index,uint32_t dex_pc)310 HInstruction* HInstructionBuilder::LoadNullCheckedLocal(uint32_t register_index, uint32_t dex_pc) {
311 HInstruction* ref = LoadLocal(register_index, DataType::Type::kReference);
312 if (!ref->CanBeNull()) {
313 return ref;
314 }
315
316 HNullCheck* null_check = new (allocator_) HNullCheck(ref, dex_pc);
317 AppendInstruction(null_check);
318 return null_check;
319 }
320
SetLoopHeaderPhiInputs()321 void HInstructionBuilder::SetLoopHeaderPhiInputs() {
322 for (size_t i = loop_headers_.size(); i > 0; --i) {
323 HBasicBlock* block = loop_headers_[i - 1];
324 for (HInstructionIterator it(block->GetPhis()); !it.Done(); it.Advance()) {
325 HPhi* phi = it.Current()->AsPhi();
326 size_t vreg = phi->GetRegNumber();
327 for (HBasicBlock* predecessor : block->GetPredecessors()) {
328 HInstruction* value = ValueOfLocalAt(predecessor, vreg);
329 if (value == nullptr) {
330 // Vreg is undefined at this predecessor. Mark it dead and leave with
331 // fewer inputs than predecessors. SsaChecker will fail if not removed.
332 phi->SetDead();
333 break;
334 } else {
335 phi->AddInput(value);
336 }
337 }
338 }
339 }
340 }
341
IsBlockPopulated(HBasicBlock * block)342 static bool IsBlockPopulated(HBasicBlock* block) {
343 if (block->IsLoopHeader()) {
344 // Suspend checks were inserted into loop headers during building of dominator tree.
345 DCHECK(block->GetFirstInstruction()->IsSuspendCheck());
346 return block->GetFirstInstruction() != block->GetLastInstruction();
347 } else if (block->IsCatchBlock()) {
348 // Nops were inserted into the beginning of catch blocks.
349 DCHECK(block->GetFirstInstruction()->IsNop());
350 return block->GetFirstInstruction() != block->GetLastInstruction();
351 } else {
352 return !block->GetInstructions().IsEmpty();
353 }
354 }
355
Build()356 bool HInstructionBuilder::Build() {
357 DCHECK(code_item_accessor_.HasCodeItem());
358 locals_for_.resize(
359 graph_->GetBlocks().size(),
360 ScopedArenaVector<HInstruction*>(local_allocator_->Adapter(kArenaAllocGraphBuilder)));
361
362 // Find locations where we want to generate extra stackmaps for native debugging.
363 // This allows us to generate the info only at interesting points (for example,
364 // at start of java statement) rather than before every dex instruction.
365 const bool native_debuggable = code_generator_ != nullptr &&
366 code_generator_->GetCompilerOptions().GetNativeDebuggable();
367 ArenaBitVector* native_debug_info_locations = nullptr;
368 if (native_debuggable) {
369 native_debug_info_locations = FindNativeDebugInfoLocations();
370 }
371
372 for (HBasicBlock* block : graph_->GetReversePostOrder()) {
373 current_block_ = block;
374 uint32_t block_dex_pc = current_block_->GetDexPc();
375
376 InitializeBlockLocals();
377
378 if (current_block_->IsEntryBlock()) {
379 InitializeParameters();
380 AppendInstruction(new (allocator_) HSuspendCheck(0u));
381 if (graph_->IsDebuggable() && code_generator_->GetCompilerOptions().IsJitCompiler()) {
382 AppendInstruction(new (allocator_) HMethodEntryHook(0u));
383 }
384 AppendInstruction(new (allocator_) HGoto(0u));
385 continue;
386 } else if (current_block_->IsExitBlock()) {
387 AppendInstruction(new (allocator_) HExit());
388 continue;
389 } else if (current_block_->IsLoopHeader()) {
390 HSuspendCheck* suspend_check = new (allocator_) HSuspendCheck(current_block_->GetDexPc());
391 current_block_->GetLoopInformation()->SetSuspendCheck(suspend_check);
392 // This is slightly odd because the loop header might not be empty (TryBoundary).
393 // But we're still creating the environment with locals from the top of the block.
394 InsertInstructionAtTop(suspend_check);
395 } else if (current_block_->IsCatchBlock()) {
396 // We add an environment emitting instruction at the beginning of each catch block, in order
397 // to support try catch inlining.
398 // This is slightly odd because the catch block might not be empty (TryBoundary).
399 InsertInstructionAtTop(new (allocator_) HNop(block_dex_pc, /* needs_environment= */ true));
400 }
401
402 if (block_dex_pc == kNoDexPc || current_block_ != block_builder_->GetBlockAt(block_dex_pc)) {
403 // Synthetic block that does not need to be populated.
404 DCHECK(IsBlockPopulated(current_block_));
405 continue;
406 }
407
408 DCHECK(!IsBlockPopulated(current_block_));
409
410 for (const DexInstructionPcPair& pair : code_item_accessor_.InstructionsFrom(block_dex_pc)) {
411 if (current_block_ == nullptr) {
412 // The previous instruction ended this block.
413 break;
414 }
415
416 const uint32_t dex_pc = pair.DexPc();
417 if (dex_pc != block_dex_pc && FindBlockStartingAt(dex_pc) != nullptr) {
418 // This dex_pc starts a new basic block.
419 break;
420 }
421
422 if (current_block_->IsTryBlock() && IsThrowingDexInstruction(pair.Inst())) {
423 PropagateLocalsToCatchBlocks();
424 }
425
426 if (native_debuggable && native_debug_info_locations->IsBitSet(dex_pc)) {
427 AppendInstruction(new (allocator_) HNop(dex_pc, /* needs_environment= */ true));
428 }
429
430 // Note: There may be no Thread for gtests.
431 DCHECK(Thread::Current() == nullptr || !Thread::Current()->IsExceptionPending())
432 << dex_file_->PrettyMethod(dex_compilation_unit_->GetDexMethodIndex())
433 << " " << pair.Inst().Name() << "@" << dex_pc;
434 if (!ProcessDexInstruction(pair.Inst(), dex_pc)) {
435 return false;
436 }
437 DCHECK(Thread::Current() == nullptr || !Thread::Current()->IsExceptionPending())
438 << dex_file_->PrettyMethod(dex_compilation_unit_->GetDexMethodIndex())
439 << " " << pair.Inst().Name() << "@" << dex_pc;
440 }
441
442 if (current_block_ != nullptr) {
443 // Branching instructions clear current_block, so we know the last
444 // instruction of the current block is not a branching instruction.
445 // We add an unconditional Goto to the next block.
446 DCHECK_EQ(current_block_->GetSuccessors().size(), 1u);
447 AppendInstruction(new (allocator_) HGoto());
448 }
449 }
450
451 SetLoopHeaderPhiInputs();
452
453 return true;
454 }
455
BuildIntrinsic(ArtMethod * method)456 void HInstructionBuilder::BuildIntrinsic(ArtMethod* method) {
457 DCHECK(!code_item_accessor_.HasCodeItem());
458 DCHECK(method->IsIntrinsic());
459 if (kIsDebugBuild) {
460 ScopedObjectAccess soa(Thread::Current());
461 CHECK(!method->IsSignaturePolymorphic());
462 }
463
464 locals_for_.resize(
465 graph_->GetBlocks().size(),
466 ScopedArenaVector<HInstruction*>(local_allocator_->Adapter(kArenaAllocGraphBuilder)));
467
468 // Fill the entry block. Do not add suspend check, we do not want a suspend
469 // check in intrinsics; intrinsic methods are supposed to be fast.
470 current_block_ = graph_->GetEntryBlock();
471 InitializeBlockLocals();
472 InitializeParameters();
473 if (graph_->IsDebuggable() && code_generator_->GetCompilerOptions().IsJitCompiler()) {
474 AppendInstruction(new (allocator_) HMethodEntryHook(0u));
475 }
476 AppendInstruction(new (allocator_) HGoto(0u));
477
478 // Fill the body.
479 current_block_ = current_block_->GetSingleSuccessor();
480 InitializeBlockLocals();
481 DCHECK(!IsBlockPopulated(current_block_));
482
483 // Add the intermediate representation, if available, or invoke instruction.
484 size_t in_vregs = graph_->GetNumberOfInVRegs();
485 size_t number_of_arguments =
486 in_vregs - std::count(current_locals_->end() - in_vregs, current_locals_->end(), nullptr);
487 uint32_t method_idx = dex_compilation_unit_->GetDexMethodIndex();
488 const char* shorty = dex_file_->GetMethodShorty(method_idx);
489 RangeInstructionOperands operands(graph_->GetNumberOfVRegs() - in_vregs, in_vregs);
490 if (!BuildSimpleIntrinsic(method, kNoDexPc, operands, shorty)) {
491 // Some intrinsics without intermediate representation still yield a leaf method,
492 // so build the invoke. Use HInvokeStaticOrDirect even for methods that would
493 // normally use an HInvokeVirtual (sharpen the call).
494 MethodReference target_method(dex_file_, method_idx);
495 HInvokeStaticOrDirect::DispatchInfo dispatch_info = {
496 MethodLoadKind::kRuntimeCall,
497 CodePtrLocation::kCallArtMethod,
498 /* method_load_data= */ 0u
499 };
500 InvokeType invoke_type = dex_compilation_unit_->IsStatic() ? kStatic : kDirect;
501 HInvokeStaticOrDirect* invoke = new (allocator_) HInvokeStaticOrDirect(
502 allocator_,
503 number_of_arguments,
504 return_type_,
505 kNoDexPc,
506 target_method,
507 method,
508 dispatch_info,
509 invoke_type,
510 target_method,
511 HInvokeStaticOrDirect::ClinitCheckRequirement::kNone,
512 !graph_->IsDebuggable());
513 HandleInvoke(invoke, operands, shorty, /* is_unresolved= */ false);
514 }
515
516 // Add the return instruction.
517 if (return_type_ == DataType::Type::kVoid) {
518 if (graph_->IsDebuggable() && code_generator_->GetCompilerOptions().IsJitCompiler()) {
519 AppendInstruction(new (allocator_) HMethodExitHook(graph_->GetNullConstant(), kNoDexPc));
520 }
521 AppendInstruction(new (allocator_) HReturnVoid());
522 } else {
523 if (graph_->IsDebuggable() && code_generator_->GetCompilerOptions().IsJitCompiler()) {
524 AppendInstruction(new (allocator_) HMethodExitHook(latest_result_, kNoDexPc));
525 }
526 AppendInstruction(new (allocator_) HReturn(latest_result_));
527 }
528
529 // Fill the exit block.
530 DCHECK_EQ(current_block_->GetSingleSuccessor(), graph_->GetExitBlock());
531 current_block_ = graph_->GetExitBlock();
532 InitializeBlockLocals();
533 AppendInstruction(new (allocator_) HExit());
534 }
535
FindNativeDebugInfoLocations()536 ArenaBitVector* HInstructionBuilder::FindNativeDebugInfoLocations() {
537 ArenaBitVector* locations = ArenaBitVector::Create(local_allocator_,
538 code_item_accessor_.InsnsSizeInCodeUnits(),
539 /* expandable= */ false,
540 kArenaAllocGraphBuilder);
541 // The visitor gets called when the line number changes.
542 // In other words, it marks the start of new java statement.
543 code_item_accessor_.DecodeDebugPositionInfo([&](const DexFile::PositionInfo& entry) {
544 locations->SetBit(entry.address_);
545 return false;
546 });
547 // Instruction-specific tweaks.
548 for (const DexInstructionPcPair& inst : code_item_accessor_) {
549 switch (inst->Opcode()) {
550 case Instruction::MOVE_EXCEPTION: {
551 // Stop in native debugger after the exception has been moved.
552 // The compiler also expects the move at the start of basic block so
553 // we do not want to interfere by inserting native-debug-info before it.
554 locations->ClearBit(inst.DexPc());
555 DexInstructionIterator next = std::next(DexInstructionIterator(inst));
556 DCHECK(next.DexPc() != inst.DexPc());
557 if (next != code_item_accessor_.end()) {
558 locations->SetBit(next.DexPc());
559 }
560 break;
561 }
562 default:
563 break;
564 }
565 }
566 return locations;
567 }
568
LoadLocal(uint32_t reg_number,DataType::Type type) const569 HInstruction* HInstructionBuilder::LoadLocal(uint32_t reg_number, DataType::Type type) const {
570 HInstruction* value = (*current_locals_)[reg_number];
571 DCHECK(value != nullptr);
572
573 // If the operation requests a specific type, we make sure its input is of that type.
574 if (type != value->GetType()) {
575 if (DataType::IsFloatingPointType(type)) {
576 value = ssa_builder_->GetFloatOrDoubleEquivalent(value, type);
577 } else if (type == DataType::Type::kReference) {
578 value = ssa_builder_->GetReferenceTypeEquivalent(value);
579 }
580 DCHECK(value != nullptr);
581 }
582
583 return value;
584 }
585
UpdateLocal(uint32_t reg_number,HInstruction * stored_value)586 void HInstructionBuilder::UpdateLocal(uint32_t reg_number, HInstruction* stored_value) {
587 DataType::Type stored_type = stored_value->GetType();
588 DCHECK_NE(stored_type, DataType::Type::kVoid);
589
590 // Storing into vreg `reg_number` may implicitly invalidate the surrounding
591 // registers. Consider the following cases:
592 // (1) Storing a wide value must overwrite previous values in both `reg_number`
593 // and `reg_number+1`. We store `nullptr` in `reg_number+1`.
594 // (2) If vreg `reg_number-1` holds a wide value, writing into `reg_number`
595 // must invalidate it. We store `nullptr` in `reg_number-1`.
596 // Consequently, storing a wide value into the high vreg of another wide value
597 // will invalidate both `reg_number-1` and `reg_number+1`.
598
599 if (reg_number != 0) {
600 HInstruction* local_low = (*current_locals_)[reg_number - 1];
601 if (local_low != nullptr && DataType::Is64BitType(local_low->GetType())) {
602 // The vreg we are storing into was previously the high vreg of a pair.
603 // We need to invalidate its low vreg.
604 DCHECK((*current_locals_)[reg_number] == nullptr);
605 (*current_locals_)[reg_number - 1] = nullptr;
606 }
607 }
608
609 (*current_locals_)[reg_number] = stored_value;
610 if (DataType::Is64BitType(stored_type)) {
611 // We are storing a pair. Invalidate the instruction in the high vreg.
612 (*current_locals_)[reg_number + 1] = nullptr;
613 }
614 }
615
InitializeParameters()616 void HInstructionBuilder::InitializeParameters() {
617 DCHECK(current_block_->IsEntryBlock());
618
619 // outer_compilation_unit_ is null only when unit testing.
620 if (outer_compilation_unit_ == nullptr) {
621 return;
622 }
623
624 const char* shorty = dex_compilation_unit_->GetShorty();
625 uint16_t number_of_parameters = graph_->GetNumberOfInVRegs();
626 uint16_t locals_index = graph_->GetNumberOfLocalVRegs();
627 uint16_t parameter_index = 0;
628
629 const dex::MethodId& referrer_method_id =
630 dex_file_->GetMethodId(dex_compilation_unit_->GetDexMethodIndex());
631 if (!dex_compilation_unit_->IsStatic()) {
632 // Add the implicit 'this' argument, not expressed in the signature.
633 HParameterValue* parameter = new (allocator_) HParameterValue(*dex_file_,
634 referrer_method_id.class_idx_,
635 parameter_index++,
636 DataType::Type::kReference,
637 /* is_this= */ true);
638 AppendInstruction(parameter);
639 UpdateLocal(locals_index++, parameter);
640 number_of_parameters--;
641 current_this_parameter_ = parameter;
642 } else {
643 DCHECK(current_this_parameter_ == nullptr);
644 }
645
646 const dex::ProtoId& proto = dex_file_->GetMethodPrototype(referrer_method_id);
647 const dex::TypeList* arg_types = dex_file_->GetProtoParameters(proto);
648 for (int i = 0, shorty_pos = 1; i < number_of_parameters; i++) {
649 HParameterValue* parameter = new (allocator_) HParameterValue(
650 *dex_file_,
651 arg_types->GetTypeItem(shorty_pos - 1).type_idx_,
652 parameter_index++,
653 DataType::FromShorty(shorty[shorty_pos]),
654 /* is_this= */ false);
655 ++shorty_pos;
656 AppendInstruction(parameter);
657 // Store the parameter value in the local that the dex code will use
658 // to reference that parameter.
659 UpdateLocal(locals_index++, parameter);
660 if (DataType::Is64BitType(parameter->GetType())) {
661 i++;
662 locals_index++;
663 parameter_index++;
664 }
665 }
666 }
667
668 template<typename T, bool kCompareWithZero>
If_21_22t(const Instruction & instruction,uint32_t dex_pc)669 void HInstructionBuilder::If_21_22t(const Instruction& instruction, uint32_t dex_pc) {
670 HInstruction* value = LoadLocal(instruction.VRegA(), DataType::Type::kInt32);
671 T* comparison = nullptr;
672 if (kCompareWithZero) {
673 comparison = new (allocator_) T(value, graph_->GetIntConstant(0, dex_pc), dex_pc);
674 } else {
675 HInstruction* second = LoadLocal(instruction.VRegB(), DataType::Type::kInt32);
676 comparison = new (allocator_) T(value, second, dex_pc);
677 }
678 AppendInstruction(comparison);
679 HIf* if_instr = new (allocator_) HIf(comparison, dex_pc);
680
681 ProfilingInfo* info = graph_->GetProfilingInfo();
682 if (info != nullptr && !graph_->IsCompilingBaseline()) {
683 BranchCache* cache = info->GetBranchCache(dex_pc);
684 if (cache != nullptr) {
685 if_instr->SetTrueCount(cache->GetTrue());
686 if_instr->SetFalseCount(cache->GetFalse());
687 }
688 }
689
690 // Append after setting true/false count, so that the builder knows if the
691 // instruction needs an environment.
692 AppendInstruction(if_instr);
693 current_block_ = nullptr;
694 }
695
696 template<typename T>
Unop_12x(const Instruction & instruction,DataType::Type type,uint32_t dex_pc)697 void HInstructionBuilder::Unop_12x(const Instruction& instruction,
698 DataType::Type type,
699 uint32_t dex_pc) {
700 HInstruction* first = LoadLocal(instruction.VRegB(), type);
701 AppendInstruction(new (allocator_) T(type, first, dex_pc));
702 UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction());
703 }
704
Conversion_12x(const Instruction & instruction,DataType::Type input_type,DataType::Type result_type,uint32_t dex_pc)705 void HInstructionBuilder::Conversion_12x(const Instruction& instruction,
706 DataType::Type input_type,
707 DataType::Type result_type,
708 uint32_t dex_pc) {
709 HInstruction* first = LoadLocal(instruction.VRegB(), input_type);
710 AppendInstruction(new (allocator_) HTypeConversion(result_type, first, dex_pc));
711 UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction());
712 }
713
714 template<typename T>
Binop_23x(const Instruction & instruction,DataType::Type type,uint32_t dex_pc)715 void HInstructionBuilder::Binop_23x(const Instruction& instruction,
716 DataType::Type type,
717 uint32_t dex_pc) {
718 HInstruction* first = LoadLocal(instruction.VRegB(), type);
719 HInstruction* second = LoadLocal(instruction.VRegC(), type);
720 AppendInstruction(new (allocator_) T(type, first, second, dex_pc));
721 UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction());
722 }
723
724 template<typename T>
Binop_23x_shift(const Instruction & instruction,DataType::Type type,uint32_t dex_pc)725 void HInstructionBuilder::Binop_23x_shift(const Instruction& instruction,
726 DataType::Type type,
727 uint32_t dex_pc) {
728 HInstruction* first = LoadLocal(instruction.VRegB(), type);
729 HInstruction* second = LoadLocal(instruction.VRegC(), DataType::Type::kInt32);
730 AppendInstruction(new (allocator_) T(type, first, second, dex_pc));
731 UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction());
732 }
733
Binop_23x_cmp(const Instruction & instruction,DataType::Type type,ComparisonBias bias,uint32_t dex_pc)734 void HInstructionBuilder::Binop_23x_cmp(const Instruction& instruction,
735 DataType::Type type,
736 ComparisonBias bias,
737 uint32_t dex_pc) {
738 HInstruction* first = LoadLocal(instruction.VRegB(), type);
739 HInstruction* second = LoadLocal(instruction.VRegC(), type);
740 AppendInstruction(new (allocator_) HCompare(type, first, second, bias, dex_pc));
741 UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction());
742 }
743
744 template<typename T>
Binop_12x_shift(const Instruction & instruction,DataType::Type type,uint32_t dex_pc)745 void HInstructionBuilder::Binop_12x_shift(const Instruction& instruction,
746 DataType::Type type,
747 uint32_t dex_pc) {
748 HInstruction* first = LoadLocal(instruction.VRegA(), type);
749 HInstruction* second = LoadLocal(instruction.VRegB(), DataType::Type::kInt32);
750 AppendInstruction(new (allocator_) T(type, first, second, dex_pc));
751 UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction());
752 }
753
754 template<typename T>
Binop_12x(const Instruction & instruction,DataType::Type type,uint32_t dex_pc)755 void HInstructionBuilder::Binop_12x(const Instruction& instruction,
756 DataType::Type type,
757 uint32_t dex_pc) {
758 HInstruction* first = LoadLocal(instruction.VRegA(), type);
759 HInstruction* second = LoadLocal(instruction.VRegB(), type);
760 AppendInstruction(new (allocator_) T(type, first, second, dex_pc));
761 UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction());
762 }
763
764 template<typename T>
Binop_22s(const Instruction & instruction,bool reverse,uint32_t dex_pc)765 void HInstructionBuilder::Binop_22s(const Instruction& instruction, bool reverse, uint32_t dex_pc) {
766 HInstruction* first = LoadLocal(instruction.VRegB(), DataType::Type::kInt32);
767 HInstruction* second = graph_->GetIntConstant(instruction.VRegC_22s(), dex_pc);
768 if (reverse) {
769 std::swap(first, second);
770 }
771 AppendInstruction(new (allocator_) T(DataType::Type::kInt32, first, second, dex_pc));
772 UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction());
773 }
774
775 template<typename T>
Binop_22b(const Instruction & instruction,bool reverse,uint32_t dex_pc)776 void HInstructionBuilder::Binop_22b(const Instruction& instruction, bool reverse, uint32_t dex_pc) {
777 HInstruction* first = LoadLocal(instruction.VRegB(), DataType::Type::kInt32);
778 HInstruction* second = graph_->GetIntConstant(instruction.VRegC_22b(), dex_pc);
779 if (reverse) {
780 std::swap(first, second);
781 }
782 AppendInstruction(new (allocator_) T(DataType::Type::kInt32, first, second, dex_pc));
783 UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction());
784 }
785
786 // Does the method being compiled need any constructor barriers being inserted?
787 // (Always 'false' for methods that aren't <init>.)
RequiresConstructorBarrier(const DexCompilationUnit * cu)788 static bool RequiresConstructorBarrier(const DexCompilationUnit* cu) {
789 // Can be null in unit tests only.
790 if (UNLIKELY(cu == nullptr)) {
791 return false;
792 }
793
794 // Constructor barriers are applicable only for <init> methods.
795 if (LIKELY(!cu->IsConstructor() || cu->IsStatic())) {
796 return false;
797 }
798
799 return cu->RequiresConstructorBarrier();
800 }
801
802 // Returns true if `block` has only one successor which starts at the next
803 // dex_pc after `instruction` at `dex_pc`.
IsFallthroughInstruction(const Instruction & instruction,uint32_t dex_pc,HBasicBlock * block)804 static bool IsFallthroughInstruction(const Instruction& instruction,
805 uint32_t dex_pc,
806 HBasicBlock* block) {
807 uint32_t next_dex_pc = dex_pc + instruction.SizeInCodeUnits();
808 return block->GetSingleSuccessor()->GetDexPc() == next_dex_pc;
809 }
810
BuildSwitch(const Instruction & instruction,uint32_t dex_pc)811 void HInstructionBuilder::BuildSwitch(const Instruction& instruction, uint32_t dex_pc) {
812 HInstruction* value = LoadLocal(instruction.VRegA(), DataType::Type::kInt32);
813 DexSwitchTable table(instruction, dex_pc);
814
815 if (table.GetNumEntries() == 0) {
816 // Empty Switch. Code falls through to the next block.
817 DCHECK(IsFallthroughInstruction(instruction, dex_pc, current_block_));
818 AppendInstruction(new (allocator_) HGoto(dex_pc));
819 } else if (table.ShouldBuildDecisionTree()) {
820 for (DexSwitchTableIterator it(table); !it.Done(); it.Advance()) {
821 HInstruction* case_value = graph_->GetIntConstant(it.CurrentKey(), dex_pc);
822 HEqual* comparison = new (allocator_) HEqual(value, case_value, dex_pc);
823 AppendInstruction(comparison);
824 AppendInstruction(new (allocator_) HIf(comparison, dex_pc));
825
826 if (!it.IsLast()) {
827 current_block_ = FindBlockStartingAt(it.GetDexPcForCurrentIndex());
828 }
829 }
830 } else {
831 AppendInstruction(
832 new (allocator_) HPackedSwitch(table.GetEntryAt(0), table.GetNumEntries(), value, dex_pc));
833 }
834
835 current_block_ = nullptr;
836 }
837
BuildReturn(const Instruction & instruction,DataType::Type type,uint32_t dex_pc)838 void HInstructionBuilder::BuildReturn(const Instruction& instruction,
839 DataType::Type type,
840 uint32_t dex_pc) {
841 if (type == DataType::Type::kVoid) {
842 // Only <init> (which is a return-void) could possibly have a constructor fence.
843 // This may insert additional redundant constructor fences from the super constructors.
844 // TODO: remove redundant constructor fences (b/36656456).
845 if (RequiresConstructorBarrier(dex_compilation_unit_)) {
846 // Compiling instance constructor.
847 DCHECK_STREQ("<init>", graph_->GetMethodName());
848
849 HInstruction* fence_target = current_this_parameter_;
850 DCHECK(fence_target != nullptr);
851
852 AppendInstruction(new (allocator_) HConstructorFence(fence_target, dex_pc, allocator_));
853 MaybeRecordStat(
854 compilation_stats_,
855 MethodCompilationStat::kConstructorFenceGeneratedFinal);
856 }
857 if (graph_->IsDebuggable() && code_generator_->GetCompilerOptions().IsJitCompiler()) {
858 // Return value is not used for void functions. We pass NullConstant to
859 // avoid special cases when generating code.
860 AppendInstruction(new (allocator_) HMethodExitHook(graph_->GetNullConstant(), dex_pc));
861 }
862 AppendInstruction(new (allocator_) HReturnVoid(dex_pc));
863 } else {
864 DCHECK(!RequiresConstructorBarrier(dex_compilation_unit_));
865 HInstruction* value = LoadLocal(instruction.VRegA(), type);
866 if (graph_->IsDebuggable() && code_generator_->GetCompilerOptions().IsJitCompiler()) {
867 AppendInstruction(new (allocator_) HMethodExitHook(value, dex_pc));
868 }
869 AppendInstruction(new (allocator_) HReturn(value, dex_pc));
870 }
871 current_block_ = nullptr;
872 }
873
GetInvokeTypeFromOpCode(Instruction::Code opcode)874 static InvokeType GetInvokeTypeFromOpCode(Instruction::Code opcode) {
875 switch (opcode) {
876 case Instruction::INVOKE_STATIC:
877 case Instruction::INVOKE_STATIC_RANGE:
878 return kStatic;
879 case Instruction::INVOKE_DIRECT:
880 case Instruction::INVOKE_DIRECT_RANGE:
881 return kDirect;
882 case Instruction::INVOKE_VIRTUAL:
883 case Instruction::INVOKE_VIRTUAL_RANGE:
884 return kVirtual;
885 case Instruction::INVOKE_INTERFACE:
886 case Instruction::INVOKE_INTERFACE_RANGE:
887 return kInterface;
888 case Instruction::INVOKE_SUPER_RANGE:
889 case Instruction::INVOKE_SUPER:
890 return kSuper;
891 default:
892 LOG(FATAL) << "Unexpected invoke opcode: " << opcode;
893 UNREACHABLE();
894 }
895 }
896
897 // Try to resolve a method using the class linker. Return null if a method could
898 // not be resolved or the resolved method cannot be used for some reason.
899 // Also retrieve method data needed for creating the invoke intermediate
900 // representation while we hold the mutator lock here.
ResolveMethod(uint16_t method_idx,ArtMethod * referrer,const DexCompilationUnit & dex_compilation_unit,InvokeType * invoke_type,MethodReference * resolved_method_info,uint16_t * imt_or_vtable_index,bool * is_string_constructor)901 static ArtMethod* ResolveMethod(uint16_t method_idx,
902 ArtMethod* referrer,
903 const DexCompilationUnit& dex_compilation_unit,
904 /*inout*/InvokeType* invoke_type,
905 /*out*/MethodReference* resolved_method_info,
906 /*out*/uint16_t* imt_or_vtable_index,
907 /*out*/bool* is_string_constructor) {
908 ScopedObjectAccess soa(Thread::Current());
909
910 ClassLinker* class_linker = dex_compilation_unit.GetClassLinker();
911 Handle<mirror::ClassLoader> class_loader = dex_compilation_unit.GetClassLoader();
912
913 ArtMethod* resolved_method =
914 class_linker->ResolveMethod<ClassLinker::ResolveMode::kCheckICCEAndIAE>(
915 method_idx,
916 dex_compilation_unit.GetDexCache(),
917 class_loader,
918 referrer,
919 *invoke_type);
920
921 if (UNLIKELY(resolved_method == nullptr)) {
922 // Clean up any exception left by type resolution.
923 soa.Self()->ClearException();
924 return nullptr;
925 }
926 DCHECK(!soa.Self()->IsExceptionPending());
927
928 // The referrer may be unresolved for AOT if we're compiling a class that cannot be
929 // resolved because, for example, we don't find a superclass in the classpath.
930 if (referrer == nullptr) {
931 // The class linker cannot check access without a referrer, so we have to do it.
932 // Check if the declaring class or referencing class is accessible.
933 SamePackageCompare same_package(dex_compilation_unit);
934 ObjPtr<mirror::Class> declaring_class = resolved_method->GetDeclaringClass();
935 bool declaring_class_accessible = declaring_class->IsPublic() || same_package(declaring_class);
936 if (!declaring_class_accessible) {
937 // It is possible to access members from an inaccessible superclass
938 // by referencing them through an accessible subclass.
939 ObjPtr<mirror::Class> referenced_class = class_linker->LookupResolvedType(
940 dex_compilation_unit.GetDexFile()->GetMethodId(method_idx).class_idx_,
941 dex_compilation_unit.GetDexCache().Get(),
942 class_loader.Get());
943 DCHECK(referenced_class != nullptr); // Must have been resolved when resolving the method.
944 if (!referenced_class->IsPublic() && !same_package(referenced_class)) {
945 return nullptr;
946 }
947 }
948 // Check whether the method itself is accessible.
949 // Since the referrer is unresolved but the method is resolved, it cannot be
950 // inside the same class, so a private method is known to be inaccessible.
951 // And without a resolved referrer, we cannot check for protected member access
952 // in superlass, so we handle only access to public member or within the package.
953 if (resolved_method->IsPrivate() ||
954 (!resolved_method->IsPublic() && !declaring_class_accessible)) {
955 return nullptr;
956 }
957 }
958
959 // We have to special case the invoke-super case, as ClassLinker::ResolveMethod does not.
960 // We need to look at the referrer's super class vtable. We need to do this to know if we need to
961 // make this an invoke-unresolved to handle cross-dex invokes or abstract super methods, both of
962 // which require runtime handling.
963 if (*invoke_type == kSuper) {
964 if (referrer == nullptr) {
965 // We could not determine the method's class we need to wait until runtime.
966 DCHECK(Runtime::Current()->IsAotCompiler());
967 return nullptr;
968 }
969 ArtMethod* actual_method = FindSuperMethodToCall</*access_check=*/true>(
970 method_idx, resolved_method, referrer, soa.Self());
971 if (actual_method == nullptr) {
972 // Clean up any exception left by method resolution.
973 soa.Self()->ClearException();
974 return nullptr;
975 }
976 if (!actual_method->IsInvokable()) {
977 // Fail if the actual method cannot be invoked. Otherwise, the runtime resolution stub
978 // could resolve the callee to the wrong method.
979 return nullptr;
980 }
981 // Call GetCanonicalMethod in case the resolved method is a copy: for super calls, the encoding
982 // of ArtMethod in BSS relies on not having copies there.
983 resolved_method = actual_method->GetCanonicalMethod(class_linker->GetImagePointerSize());
984 }
985
986 if (*invoke_type == kInterface) {
987 if (resolved_method->GetDeclaringClass()->IsObjectClass()) {
988 // If the resolved method is from j.l.Object, emit a virtual call instead.
989 // The IMT conflict stub only handles interface methods.
990 *invoke_type = kVirtual;
991 } else {
992 DCHECK(resolved_method->GetDeclaringClass()->IsInterface());
993 }
994 }
995
996 *resolved_method_info =
997 MethodReference(resolved_method->GetDexFile(), resolved_method->GetDexMethodIndex());
998 if (*invoke_type == kVirtual) {
999 // For HInvokeVirtual we need the vtable index.
1000 *imt_or_vtable_index = resolved_method->GetVtableIndex();
1001 } else if (*invoke_type == kInterface) {
1002 // For HInvokeInterface we need the IMT index.
1003 *imt_or_vtable_index = resolved_method->GetImtIndex();
1004 DCHECK_EQ(*imt_or_vtable_index, ImTable::GetImtIndex(resolved_method));
1005 }
1006
1007 *is_string_constructor = resolved_method->IsStringConstructor();
1008
1009 return resolved_method;
1010 }
1011
BuildInvoke(const Instruction & instruction,uint32_t dex_pc,uint32_t method_idx,const InstructionOperands & operands)1012 bool HInstructionBuilder::BuildInvoke(const Instruction& instruction,
1013 uint32_t dex_pc,
1014 uint32_t method_idx,
1015 const InstructionOperands& operands) {
1016 InvokeType invoke_type = GetInvokeTypeFromOpCode(instruction.Opcode());
1017 const char* shorty = dex_file_->GetMethodShorty(method_idx);
1018 DataType::Type return_type = DataType::FromShorty(shorty[0]);
1019
1020 // Remove the return type from the 'proto'.
1021 size_t number_of_arguments = strlen(shorty) - 1;
1022 if (invoke_type != kStatic) { // instance call
1023 // One extra argument for 'this'.
1024 number_of_arguments++;
1025 }
1026
1027 MethodReference resolved_method_reference(nullptr, 0u);
1028 bool is_string_constructor = false;
1029 uint16_t imt_or_vtable_index = DexFile::kDexNoIndex16;
1030 ArtMethod* resolved_method = ResolveMethod(method_idx,
1031 graph_->GetArtMethod(),
1032 *dex_compilation_unit_,
1033 &invoke_type,
1034 &resolved_method_reference,
1035 &imt_or_vtable_index,
1036 &is_string_constructor);
1037
1038 MethodReference method_reference(&graph_->GetDexFile(), method_idx);
1039 if (UNLIKELY(resolved_method == nullptr)) {
1040 DCHECK(!Thread::Current()->IsExceptionPending());
1041 MaybeRecordStat(compilation_stats_,
1042 MethodCompilationStat::kUnresolvedMethod);
1043 HInvoke* invoke = new (allocator_) HInvokeUnresolved(allocator_,
1044 number_of_arguments,
1045 return_type,
1046 dex_pc,
1047 method_reference,
1048 invoke_type);
1049 return HandleInvoke(invoke, operands, shorty, /* is_unresolved= */ true);
1050 }
1051
1052 // Replace calls to String.<init> with StringFactory.
1053 if (is_string_constructor) {
1054 uint32_t string_init_entry_point = WellKnownClasses::StringInitToEntryPoint(resolved_method);
1055 HInvokeStaticOrDirect::DispatchInfo dispatch_info = {
1056 MethodLoadKind::kStringInit,
1057 CodePtrLocation::kCallArtMethod,
1058 dchecked_integral_cast<uint64_t>(string_init_entry_point)
1059 };
1060 // We pass null for the resolved_method to ensure optimizations
1061 // don't rely on it.
1062 HInvoke* invoke = new (allocator_) HInvokeStaticOrDirect(
1063 allocator_,
1064 number_of_arguments - 1,
1065 /* return_type= */ DataType::Type::kReference,
1066 dex_pc,
1067 method_reference,
1068 /* resolved_method= */ nullptr,
1069 dispatch_info,
1070 invoke_type,
1071 resolved_method_reference,
1072 HInvokeStaticOrDirect::ClinitCheckRequirement::kImplicit,
1073 !graph_->IsDebuggable());
1074 return HandleStringInit(invoke, operands, shorty);
1075 }
1076
1077 // Potential class initialization check, in the case of a static method call.
1078 HInvokeStaticOrDirect::ClinitCheckRequirement clinit_check_requirement =
1079 HInvokeStaticOrDirect::ClinitCheckRequirement::kNone;
1080 HClinitCheck* clinit_check = nullptr;
1081 if (invoke_type == kStatic) {
1082 clinit_check = ProcessClinitCheckForInvoke(dex_pc, resolved_method, &clinit_check_requirement);
1083 }
1084
1085 // Try to build an HIR replacement for the intrinsic.
1086 if (UNLIKELY(resolved_method->IsIntrinsic()) && !graph_->IsDebuggable()) {
1087 // All intrinsics are in the primary boot image, so their class can always be referenced
1088 // and we do not need to rely on the implicit class initialization check. The class should
1089 // be initialized but we do not require that here.
1090 DCHECK_NE(clinit_check_requirement, HInvokeStaticOrDirect::ClinitCheckRequirement::kImplicit);
1091 if (BuildSimpleIntrinsic(resolved_method, dex_pc, operands, shorty)) {
1092 return true;
1093 }
1094 }
1095
1096 HInvoke* invoke = nullptr;
1097 if (invoke_type == kDirect || invoke_type == kStatic || invoke_type == kSuper) {
1098 // For sharpening, we create another MethodReference, to account for the
1099 // kSuper case below where we cannot find a dex method index.
1100 bool has_method_id = true;
1101 if (invoke_type == kSuper) {
1102 uint32_t dex_method_index = method_reference.index;
1103 if (IsSameDexFile(*resolved_method_reference.dex_file,
1104 *dex_compilation_unit_->GetDexFile())) {
1105 // Update the method index to the one resolved. Note that this may be a no-op if
1106 // we resolved to the method referenced by the instruction.
1107 dex_method_index = resolved_method_reference.index;
1108 } else {
1109 // Try to find a dex method index in this caller's dex file.
1110 ScopedObjectAccess soa(Thread::Current());
1111 dex_method_index = resolved_method->FindDexMethodIndexInOtherDexFile(
1112 *dex_compilation_unit_->GetDexFile(), method_idx);
1113 }
1114 if (dex_method_index == dex::kDexNoIndex) {
1115 has_method_id = false;
1116 } else {
1117 method_reference.index = dex_method_index;
1118 }
1119 }
1120 HInvokeStaticOrDirect::DispatchInfo dispatch_info =
1121 HSharpening::SharpenLoadMethod(resolved_method,
1122 has_method_id,
1123 /* for_interface_call= */ false,
1124 code_generator_);
1125 if (dispatch_info.code_ptr_location == CodePtrLocation::kCallCriticalNative) {
1126 graph_->SetHasDirectCriticalNativeCall(true);
1127 }
1128 invoke = new (allocator_) HInvokeStaticOrDirect(allocator_,
1129 number_of_arguments,
1130 return_type,
1131 dex_pc,
1132 method_reference,
1133 resolved_method,
1134 dispatch_info,
1135 invoke_type,
1136 resolved_method_reference,
1137 clinit_check_requirement,
1138 !graph_->IsDebuggable());
1139 if (clinit_check != nullptr) {
1140 // Add the class initialization check as last input of `invoke`.
1141 DCHECK_EQ(clinit_check_requirement, HInvokeStaticOrDirect::ClinitCheckRequirement::kExplicit);
1142 size_t clinit_check_index = invoke->InputCount() - 1u;
1143 DCHECK(invoke->InputAt(clinit_check_index) == nullptr);
1144 invoke->SetArgumentAt(clinit_check_index, clinit_check);
1145 }
1146 } else if (invoke_type == kVirtual) {
1147 invoke = new (allocator_) HInvokeVirtual(allocator_,
1148 number_of_arguments,
1149 return_type,
1150 dex_pc,
1151 method_reference,
1152 resolved_method,
1153 resolved_method_reference,
1154 /*vtable_index=*/ imt_or_vtable_index,
1155 !graph_->IsDebuggable());
1156 } else {
1157 DCHECK_EQ(invoke_type, kInterface);
1158 if (kIsDebugBuild) {
1159 ScopedObjectAccess soa(Thread::Current());
1160 DCHECK(resolved_method->GetDeclaringClass()->IsInterface());
1161 }
1162 MethodLoadKind load_kind = HSharpening::SharpenLoadMethod(
1163 resolved_method,
1164 /* has_method_id= */ true,
1165 /* for_interface_call= */ true,
1166 code_generator_)
1167 .method_load_kind;
1168 invoke = new (allocator_) HInvokeInterface(allocator_,
1169 number_of_arguments,
1170 return_type,
1171 dex_pc,
1172 method_reference,
1173 resolved_method,
1174 resolved_method_reference,
1175 /*imt_index=*/ imt_or_vtable_index,
1176 load_kind,
1177 !graph_->IsDebuggable());
1178 }
1179 return HandleInvoke(invoke, operands, shorty, /* is_unresolved= */ false);
1180 }
1181
VarHandleAccessorNeedsReturnTypeCheck(HInvoke * invoke,DataType::Type return_type)1182 static bool VarHandleAccessorNeedsReturnTypeCheck(HInvoke* invoke, DataType::Type return_type) {
1183 mirror::VarHandle::AccessModeTemplate access_mode_template =
1184 mirror::VarHandle::GetAccessModeTemplateByIntrinsic(invoke->GetIntrinsic());
1185
1186 switch (access_mode_template) {
1187 case mirror::VarHandle::AccessModeTemplate::kGet:
1188 case mirror::VarHandle::AccessModeTemplate::kGetAndUpdate:
1189 case mirror::VarHandle::AccessModeTemplate::kCompareAndExchange:
1190 return return_type == DataType::Type::kReference;
1191 case mirror::VarHandle::AccessModeTemplate::kSet:
1192 case mirror::VarHandle::AccessModeTemplate::kCompareAndSet:
1193 return false;
1194 }
1195 }
1196
1197 // This function initializes `VarHandleOptimizations`, does a number of static checks and disables
1198 // the intrinsic if some of the checks fail. This is necessary for the code generator to work (for
1199 // both the baseline and the optimizing compiler).
DecideVarHandleIntrinsic(HInvoke * invoke)1200 static void DecideVarHandleIntrinsic(HInvoke* invoke) {
1201 switch (invoke->GetIntrinsic()) {
1202 case Intrinsics::kVarHandleCompareAndExchange:
1203 case Intrinsics::kVarHandleCompareAndExchangeAcquire:
1204 case Intrinsics::kVarHandleCompareAndExchangeRelease:
1205 case Intrinsics::kVarHandleCompareAndSet:
1206 case Intrinsics::kVarHandleGet:
1207 case Intrinsics::kVarHandleGetAcquire:
1208 case Intrinsics::kVarHandleGetAndAdd:
1209 case Intrinsics::kVarHandleGetAndAddAcquire:
1210 case Intrinsics::kVarHandleGetAndAddRelease:
1211 case Intrinsics::kVarHandleGetAndBitwiseAnd:
1212 case Intrinsics::kVarHandleGetAndBitwiseAndAcquire:
1213 case Intrinsics::kVarHandleGetAndBitwiseAndRelease:
1214 case Intrinsics::kVarHandleGetAndBitwiseOr:
1215 case Intrinsics::kVarHandleGetAndBitwiseOrAcquire:
1216 case Intrinsics::kVarHandleGetAndBitwiseOrRelease:
1217 case Intrinsics::kVarHandleGetAndBitwiseXor:
1218 case Intrinsics::kVarHandleGetAndBitwiseXorAcquire:
1219 case Intrinsics::kVarHandleGetAndBitwiseXorRelease:
1220 case Intrinsics::kVarHandleGetAndSet:
1221 case Intrinsics::kVarHandleGetAndSetAcquire:
1222 case Intrinsics::kVarHandleGetAndSetRelease:
1223 case Intrinsics::kVarHandleGetOpaque:
1224 case Intrinsics::kVarHandleGetVolatile:
1225 case Intrinsics::kVarHandleSet:
1226 case Intrinsics::kVarHandleSetOpaque:
1227 case Intrinsics::kVarHandleSetRelease:
1228 case Intrinsics::kVarHandleSetVolatile:
1229 case Intrinsics::kVarHandleWeakCompareAndSet:
1230 case Intrinsics::kVarHandleWeakCompareAndSetAcquire:
1231 case Intrinsics::kVarHandleWeakCompareAndSetPlain:
1232 case Intrinsics::kVarHandleWeakCompareAndSetRelease:
1233 break;
1234 default:
1235 return; // Not a VarHandle intrinsic, skip.
1236 }
1237
1238 DCHECK(invoke->IsInvokePolymorphic());
1239 VarHandleOptimizations optimizations(invoke);
1240
1241 // Do only simple static checks here (those for which we have enough information). More complex
1242 // checks should be done in instruction simplifier, which runs after other optimization passes
1243 // that may provide useful information.
1244
1245 size_t expected_coordinates_count = GetExpectedVarHandleCoordinatesCount(invoke);
1246 if (expected_coordinates_count > 2u) {
1247 optimizations.SetDoNotIntrinsify();
1248 return;
1249 }
1250 if (expected_coordinates_count != 0u) {
1251 // Except for static fields (no coordinates), the first coordinate must be a reference.
1252 // Do not intrinsify if the reference is null as we would always go to slow path anyway.
1253 HInstruction* object = invoke->InputAt(1);
1254 if (object->GetType() != DataType::Type::kReference || object->IsNullConstant()) {
1255 optimizations.SetDoNotIntrinsify();
1256 return;
1257 }
1258 }
1259 if (expected_coordinates_count == 2u) {
1260 // For arrays and views, the second coordinate must be convertible to `int`.
1261 // In this context, `boolean` is not convertible but we have to look at the shorty
1262 // as compiler transformations can give the invoke a valid boolean input.
1263 DataType::Type index_type = GetDataTypeFromShorty(invoke, 2);
1264 if (index_type == DataType::Type::kBool ||
1265 DataType::Kind(index_type) != DataType::Type::kInt32) {
1266 optimizations.SetDoNotIntrinsify();
1267 return;
1268 }
1269 }
1270
1271 uint32_t number_of_arguments = invoke->GetNumberOfArguments();
1272 DataType::Type return_type = invoke->GetType();
1273 mirror::VarHandle::AccessModeTemplate access_mode_template =
1274 mirror::VarHandle::GetAccessModeTemplateByIntrinsic(invoke->GetIntrinsic());
1275 switch (access_mode_template) {
1276 case mirror::VarHandle::AccessModeTemplate::kGet:
1277 // The return type should be the same as varType, so it shouldn't be void.
1278 if (return_type == DataType::Type::kVoid) {
1279 optimizations.SetDoNotIntrinsify();
1280 return;
1281 }
1282 break;
1283 case mirror::VarHandle::AccessModeTemplate::kSet:
1284 if (return_type != DataType::Type::kVoid) {
1285 optimizations.SetDoNotIntrinsify();
1286 return;
1287 }
1288 break;
1289 case mirror::VarHandle::AccessModeTemplate::kCompareAndSet: {
1290 if (return_type != DataType::Type::kBool) {
1291 optimizations.SetDoNotIntrinsify();
1292 return;
1293 }
1294 uint32_t expected_value_index = number_of_arguments - 2;
1295 uint32_t new_value_index = number_of_arguments - 1;
1296 DataType::Type expected_value_type = GetDataTypeFromShorty(invoke, expected_value_index);
1297 DataType::Type new_value_type = GetDataTypeFromShorty(invoke, new_value_index);
1298 if (expected_value_type != new_value_type) {
1299 optimizations.SetDoNotIntrinsify();
1300 return;
1301 }
1302 break;
1303 }
1304 case mirror::VarHandle::AccessModeTemplate::kCompareAndExchange: {
1305 uint32_t expected_value_index = number_of_arguments - 2;
1306 uint32_t new_value_index = number_of_arguments - 1;
1307 DataType::Type expected_value_type = GetDataTypeFromShorty(invoke, expected_value_index);
1308 DataType::Type new_value_type = GetDataTypeFromShorty(invoke, new_value_index);
1309 if (expected_value_type != new_value_type || return_type != expected_value_type) {
1310 optimizations.SetDoNotIntrinsify();
1311 return;
1312 }
1313 break;
1314 }
1315 case mirror::VarHandle::AccessModeTemplate::kGetAndUpdate: {
1316 DataType::Type value_type = GetDataTypeFromShorty(invoke, number_of_arguments - 1);
1317 if (IsVarHandleGetAndAdd(invoke) &&
1318 (value_type == DataType::Type::kReference || value_type == DataType::Type::kBool)) {
1319 // We should only add numerical types.
1320 //
1321 // For byte array views floating-point types are not allowed, see javadoc comments for
1322 // java.lang.invoke.MethodHandles.byteArrayViewVarHandle(). But ART treats them as numeric
1323 // types in ByteArrayViewVarHandle::Access(). Consequently we do generate intrinsic code,
1324 // but it always fails the access mode check at runtime.
1325 optimizations.SetDoNotIntrinsify();
1326 return;
1327 } else if (IsVarHandleGetAndBitwiseOp(invoke) && !DataType::IsIntegralType(value_type)) {
1328 // We can only apply operators to bitwise integral types.
1329 // Note that bitwise VarHandle operations accept a non-integral boolean type and
1330 // perform the appropriate logical operation. However, the result is the same as
1331 // using the bitwise operation on our boolean representation and this fits well
1332 // with DataType::IsIntegralType() treating the compiler type kBool as integral.
1333 optimizations.SetDoNotIntrinsify();
1334 return;
1335 }
1336 if (value_type != return_type) {
1337 optimizations.SetDoNotIntrinsify();
1338 return;
1339 }
1340 break;
1341 }
1342 }
1343 }
1344
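// Illustration (not from the original source): the coordinate counts checked above follow
// the shape of the VarHandle at the call site, e.g. in Java (hypothetical class C):
//
//   VarHandle svh = MethodHandles.lookup().findStaticVarHandle(C.class, "sf", int.class);
//   svh.get();              // 0 coordinates (static field).
//   VarHandle ivh = MethodHandles.lookup().findVarHandle(C.class, "f", int.class);
//   ivh.get(c);             // 1 coordinate (the receiver reference).
//   VarHandle avh = MethodHandles.arrayElementVarHandle(int[].class);
//   avh.get(array, index);  // 2 coordinates (reference plus int index).
//
// More than two coordinates, a null or non-reference first coordinate, or a mismatched
// return/value type makes the code above mark the invoke as "do not intrinsify".
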
1345 bool HInstructionBuilder::BuildInvokePolymorphic(uint32_t dex_pc,
1346 uint32_t method_idx,
1347 dex::ProtoIndex proto_idx,
1348 const InstructionOperands& operands) {
1349 const char* shorty = dex_file_->GetShorty(proto_idx);
1350 DCHECK_EQ(1 + ArtMethod::NumArgRegisters(shorty), operands.GetNumberOfOperands());
1351 DataType::Type return_type = DataType::FromShorty(shorty[0]);
1352 size_t number_of_arguments = strlen(shorty);
1353 // We use ResolveMethod which is also used in BuildInvoke in order to
1354 // not duplicate code. As such, we need to provide is_string_constructor
1355 // even if we don't need it afterwards.
1356 InvokeType invoke_type = InvokeType::kPolymorphic;
1357 bool is_string_constructor = false;
1358 uint16_t imt_or_vtable_index = DexFile::kDexNoIndex16;
1359 MethodReference resolved_method_reference(nullptr, 0u);
1360 ArtMethod* resolved_method = ResolveMethod(method_idx,
1361 graph_->GetArtMethod(),
1362 *dex_compilation_unit_,
1363 &invoke_type,
1364 &resolved_method_reference,
1365 &imt_or_vtable_index,
1366 &is_string_constructor);
1367
1368 MethodReference method_reference(&graph_->GetDexFile(), method_idx);
1369 HInvoke* invoke = new (allocator_) HInvokePolymorphic(allocator_,
1370 number_of_arguments,
1371 return_type,
1372 dex_pc,
1373 method_reference,
1374 resolved_method,
1375 resolved_method_reference,
1376 proto_idx);
1377 if (!HandleInvoke(invoke, operands, shorty, /* is_unresolved= */ false)) {
1378 return false;
1379 }
1380
1381 if (invoke->GetIntrinsic() != Intrinsics::kNone &&
1382 invoke->GetIntrinsic() != Intrinsics::kMethodHandleInvoke &&
1383 invoke->GetIntrinsic() != Intrinsics::kMethodHandleInvokeExact &&
1384 VarHandleAccessorNeedsReturnTypeCheck(invoke, return_type)) {
1385 // Type check is needed because VarHandle intrinsics do not type check the retrieved reference.
1386 ScopedObjectAccess soa(Thread::Current());
1387 ArtMethod* referrer = graph_->GetArtMethod();
1388 dex::TypeIndex return_type_index =
1389 referrer->GetDexFile()->GetProtoId(proto_idx).return_type_idx_;
1390
1391 BuildTypeCheck(/* is_instance_of= */ false, invoke, return_type_index, dex_pc);
1392 latest_result_ = current_block_->GetLastInstruction();
1393 }
1394
1395 DecideVarHandleIntrinsic(invoke);
1396
1397 return true;
1398 }
1399
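// Illustration (not from the original source): a VarHandle accessor returning a reference,
// e.g. `String s = (String) vh.get(obj);`, compiles to an invoke-polymorphic whose proto
// declares a String return type. Because the VarHandle intrinsics do not verify the type of
// the reference they return, the code above appends a check-cast style type check (via
// BuildTypeCheck) whenever VarHandleAccessorNeedsReturnTypeCheck() requires it, before
// DecideVarHandleIntrinsic() performs the remaining static checks.
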
1400
1401 bool HInstructionBuilder::BuildInvokeCustom(uint32_t dex_pc,
1402 uint32_t call_site_idx,
1403 const InstructionOperands& operands) {
1404 dex::ProtoIndex proto_idx = dex_file_->GetProtoIndexForCallSite(call_site_idx);
1405 const char* shorty = dex_file_->GetShorty(proto_idx);
1406 DataType::Type return_type = DataType::FromShorty(shorty[0]);
1407 size_t number_of_arguments = strlen(shorty) - 1;
1408 // HInvokeCustom takes a method reference with dex::kDexNoIndex as the method index.
1409 MethodReference method_reference(&graph_->GetDexFile(), dex::kDexNoIndex);
1410 HInvoke* invoke = new (allocator_) HInvokeCustom(allocator_,
1411 number_of_arguments,
1412 call_site_idx,
1413 return_type,
1414 dex_pc,
1415 method_reference,
1416 !graph_->IsDebuggable());
1417 return HandleInvoke(invoke, operands, shorty, /* is_unresolved= */ false);
1418 }
1419
1420 HNewInstance* HInstructionBuilder::BuildNewInstance(dex::TypeIndex type_index, uint32_t dex_pc) {
1421 ScopedObjectAccess soa(Thread::Current());
1422
1423 HLoadClass* load_class = BuildLoadClass(type_index, dex_pc);
1424
1425 HInstruction* cls = load_class;
1426 Handle<mirror::Class> klass = load_class->GetClass();
1427
1428 if (!IsInitialized(klass.Get())) {
1429 cls = new (allocator_) HClinitCheck(load_class, dex_pc);
1430 AppendInstruction(cls);
1431 }
1432
1433 // Only the access check entrypoint handles the finalizable class case. If we
1434 // need access checks, then we haven't resolved the method and the class may
1435 // again be finalizable.
1436 QuickEntrypointEnum entrypoint = kQuickAllocObjectInitialized;
1437 if (load_class->NeedsAccessCheck() ||
1438 klass == nullptr || // Finalizable/instantiable is unknown.
1439 klass->IsFinalizable() ||
1440 klass.Get() == klass->GetClass() || // Classes cannot be allocated in code
1441 !klass->IsInstantiable()) {
1442 entrypoint = kQuickAllocObjectWithChecks;
1443 }
1444 // We will always be able to resolve the string class since it is in the BCP.
1445 if (!klass.IsNull() && klass->IsStringClass()) {
1446 entrypoint = kQuickAllocStringObject;
1447 }
1448
1449 // Consider classes we haven't resolved as potentially finalizable.
1450 bool finalizable = (klass == nullptr) || klass->IsFinalizable();
1451
1452 HNewInstance* new_instance = new (allocator_) HNewInstance(
1453 cls,
1454 dex_pc,
1455 type_index,
1456 *dex_compilation_unit_->GetDexFile(),
1457 finalizable,
1458 entrypoint);
1459 AppendInstruction(new_instance);
1460
1461 return new_instance;
1462 }
1463
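// Illustration (not from the original source): how the entrypoint choice above plays out for
// hypothetical classes Foo and Bar:
//
//   new Foo()     // Foo resolved, instantiable, not finalizable
//                 //   -> kQuickAllocObjectInitialized (an HClinitCheck is prepended
//                 //      separately if Foo may not yet be initialized).
//   new Bar()     // Bar overrides Object.finalize(), or could not be resolved at all
//                 //   -> kQuickAllocObjectWithChecks.
//   new String()  //   -> kQuickAllocStringObject; the allocation is later replaced by the
//                 //      StringFactory call, see HandleStringInit().
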
1464 void HInstructionBuilder::BuildConstructorFenceForAllocation(HInstruction* allocation) {
1465 DCHECK(allocation != nullptr &&
1466 (allocation->IsNewInstance() ||
1467 allocation->IsNewArray())); // corresponding to "new" keyword in JLS.
1468
1469 if (allocation->IsNewInstance()) {
1470 // STRING SPECIAL HANDLING:
1471 // -------------------------------
1472 // Strings have a real HNewInstance node but they end up always having 0 uses.
1473 // All uses of a String HNewInstance are always transformed to replace their input
1474 // of the HNewInstance with an input of the invoke to StringFactory.
1475 //
1476 // Do not emit an HConstructorFence here since it can inhibit some String new-instance
1477 // optimizations (to pass checker tests that rely on those optimizations).
1478 HNewInstance* new_inst = allocation->AsNewInstance();
1479 HLoadClass* load_class = new_inst->GetLoadClass();
1480
1481 Thread* self = Thread::Current();
1482 ScopedObjectAccess soa(self);
1483 StackHandleScope<1> hs(self);
1484 Handle<mirror::Class> klass = load_class->GetClass();
1485 if (klass != nullptr && klass->IsStringClass()) {
1486 return;
1487 // Note: Do not use allocation->IsStringAlloc(), which requires
1488 // valid ReferenceTypeInfo; that is not available until after reference type
1489 // propagation (the instruction builder runs too early for it).
1490 }
1491 // (In terms of correctness, the StringFactory needs to provide its own
1492 // default initialization barrier, see below.)
1493 }
1494
1495 // JLS 17.4.5 "Happens-before Order" describes:
1496 //
1497 // The default initialization of any object happens-before any other actions (other than
1498 // default-writes) of a program.
1499 //
1500 // In our implementation the default initialization of an object to type T means
1501 // setting all of its initial data (object[0..size)) to 0, and setting the
1502 // object's class header (i.e. object.getClass() == T.class).
1503 //
1504 // In practice this fence ensures that the writes to the object header
1505 // are visible to other threads if this object escapes the current thread.
1506 // (and in theory the zero-initialization, but that happens automatically
1507 // when new memory pages are mapped in by the OS).
1508 HConstructorFence* ctor_fence =
1509 new (allocator_) HConstructorFence(allocation, allocation->GetDexPc(), allocator_);
1510 AppendInstruction(ctor_fence);
1511 MaybeRecordStat(
1512 compilation_stats_,
1513 MethodCompilationStat::kConstructorFenceGeneratedNew);
1514 }
1515
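// Illustration (not from the original source): the fence above is what makes unsafe
// publication of a freshly allocated object sound at the VM level for a hypothetical Foo:
//
//   // Thread A                       // Thread B
//   shared = new Foo();               Foo f = shared;
//                                     if (f != null) { f.getClass(); /* ... */ }
//
// Without the HConstructorFence, the store publishing `shared` could become visible to
// thread B before the write of the object's class header, so B could observe an object
// whose default initialization is not yet visible.
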
1516 static bool IsInImage(ObjPtr<mirror::Class> cls, const CompilerOptions& compiler_options)
1517 REQUIRES_SHARED(Locks::mutator_lock_) {
1518 if (Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(cls)) {
1519 return true;
1520 }
1521 if (compiler_options.IsGeneratingImage()) {
1522 std::string temp;
1523 const char* descriptor = cls->GetDescriptor(&temp);
1524 return compiler_options.IsImageClass(descriptor);
1525 } else {
1526 return false;
1527 }
1528 }
1529
1530 static bool IsSubClass(ObjPtr<mirror::Class> to_test, ObjPtr<mirror::Class> super_class)
1531 REQUIRES_SHARED(Locks::mutator_lock_) {
1532 return to_test != nullptr && !to_test->IsInterface() && to_test->IsSubClass(super_class);
1533 }
1534
1535 static bool HasTrivialClinit(ObjPtr<mirror::Class> klass, PointerSize pointer_size)
1536 REQUIRES_SHARED(Locks::mutator_lock_) {
1537 // Check if the class has encoded fields that trigger bytecode execution.
1538 // (Encoded fields are just a different representation of <clinit>.)
1539 if (klass->NumStaticFields() != 0u) {
1540 DCHECK(klass->GetClassDef() != nullptr);
1541 EncodedStaticFieldValueIterator it(klass->GetDexFile(), *klass->GetClassDef());
1542 for (; it.HasNext(); it.Next()) {
1543 switch (it.GetValueType()) {
1544 case EncodedArrayValueIterator::ValueType::kBoolean:
1545 case EncodedArrayValueIterator::ValueType::kByte:
1546 case EncodedArrayValueIterator::ValueType::kShort:
1547 case EncodedArrayValueIterator::ValueType::kChar:
1548 case EncodedArrayValueIterator::ValueType::kInt:
1549 case EncodedArrayValueIterator::ValueType::kLong:
1550 case EncodedArrayValueIterator::ValueType::kFloat:
1551 case EncodedArrayValueIterator::ValueType::kDouble:
1552 case EncodedArrayValueIterator::ValueType::kNull:
1553 case EncodedArrayValueIterator::ValueType::kString:
1554 // Primitive, null or j.l.String initialization is permitted.
1555 break;
1556 case EncodedArrayValueIterator::ValueType::kType:
1557 // Type initialization can load classes and execute bytecode through a class loader
1558 // which can execute arbitrary bytecode. We do not optimize for known class loaders;
1559 // kType is rarely used (if ever).
1560 return false;
1561 default:
1562 // Other types in the encoded static field list are rejected by the DexFileVerifier.
1563 LOG(FATAL) << "Unexpected type " << it.GetValueType();
1564 UNREACHABLE();
1565 }
1566 }
1567 }
1568 // Check if the class has <clinit> that executes arbitrary code.
1569 // Initialization of static fields of the class itself with constants is allowed.
1570 ArtMethod* clinit = klass->FindClassInitializer(pointer_size);
1571 if (clinit != nullptr) {
1572 const DexFile& dex_file = *clinit->GetDexFile();
1573 CodeItemInstructionAccessor accessor(dex_file, clinit->GetCodeItem());
1574 for (DexInstructionPcPair it : accessor) {
1575 switch (it->Opcode()) {
1576 case Instruction::CONST_4:
1577 case Instruction::CONST_16:
1578 case Instruction::CONST:
1579 case Instruction::CONST_HIGH16:
1580 case Instruction::CONST_WIDE_16:
1581 case Instruction::CONST_WIDE_32:
1582 case Instruction::CONST_WIDE:
1583 case Instruction::CONST_WIDE_HIGH16:
1584 case Instruction::CONST_STRING:
1585 case Instruction::CONST_STRING_JUMBO:
1586 // Primitive, null or j.l.String initialization is permitted.
1587 break;
1588 case Instruction::RETURN_VOID:
1589 break;
1590 case Instruction::SPUT:
1591 case Instruction::SPUT_WIDE:
1592 case Instruction::SPUT_OBJECT:
1593 case Instruction::SPUT_BOOLEAN:
1594 case Instruction::SPUT_BYTE:
1595 case Instruction::SPUT_CHAR:
1596 case Instruction::SPUT_SHORT:
1597 // Only initialization of a static field of the same class is permitted.
1598 if (dex_file.GetFieldId(it->VRegB_21c()).class_idx_ != klass->GetDexTypeIndex()) {
1599 return false;
1600 }
1601 break;
1602 case Instruction::NEW_ARRAY:
1603 // Only primitive arrays are permitted.
1604 if (Primitive::GetType(dex_file.GetTypeDescriptor(dex_file.GetTypeId(
1605 dex::TypeIndex(it->VRegC_22c())))[1]) == Primitive::kPrimNot) {
1606 return false;
1607 }
1608 break;
1609 case Instruction::APUT:
1610 case Instruction::APUT_WIDE:
1611 case Instruction::APUT_BOOLEAN:
1612 case Instruction::APUT_BYTE:
1613 case Instruction::APUT_CHAR:
1614 case Instruction::APUT_SHORT:
1615 case Instruction::FILL_ARRAY_DATA:
1616 case Instruction::NOP:
1617 // Allow initialization of primitive arrays (only constants can be stored).
1618 // Note: We expect NOPs used for fill-array-data-payload but accept all NOPs
1619 // (even unreferenced switch payloads if they make it through the verifier).
1620 break;
1621 default:
1622 return false;
1623 }
1624 }
1625 }
1626 return true;
1627 }
1628
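// Illustration (not from the original source): what HasTrivialClinit() accepts and rejects,
// for hypothetical classes:
//
//   class Trivial {                        // accepted: constants and a primitive array,
//     static final int N = 42;             //   i.e. const*/sput/new-array/fill-array-data.
//     static final int[] TABLE = {1, 2, 3};
//   }
//   class NonTrivial {                     // rejected: <clinit> contains an invoke.
//     static final String S = compute();
//   }
//
// Stores to another class's static fields, reference arrays and encoded kType values are
// also rejected because they may trigger further class loading or initialization.
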
1629 static bool HasTrivialInitialization(ObjPtr<mirror::Class> cls,
1630 const CompilerOptions& compiler_options)
1631 REQUIRES_SHARED(Locks::mutator_lock_) {
1632 Runtime* runtime = Runtime::Current();
1633 PointerSize pointer_size = runtime->GetClassLinker()->GetImagePointerSize();
1634
1635 // Check the superclass chain.
1636 for (ObjPtr<mirror::Class> klass = cls; klass != nullptr; klass = klass->GetSuperClass()) {
1637 if (klass->IsInitialized() && IsInImage(klass, compiler_options)) {
1638 break; // `klass` and its superclasses are already initialized in the boot or app image.
1639 }
1640 if (!HasTrivialClinit(klass, pointer_size)) {
1641 return false;
1642 }
1643 }
1644
1645 // Also check interfaces with default methods as they need to be initialized as well.
1646 ObjPtr<mirror::IfTable> iftable = cls->GetIfTable();
1647 DCHECK(iftable != nullptr);
1648 for (int32_t i = 0, count = iftable->Count(); i != count; ++i) {
1649 ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
1650 if (!iface->HasDefaultMethods()) {
1651 continue; // Initializing `cls` does not initialize this interface.
1652 }
1653 if (iface->IsInitialized() && IsInImage(iface, compiler_options)) {
1654 continue; // This interface is already initialized in the boot or app image.
1655 }
1656 if (!HasTrivialClinit(iface, pointer_size)) {
1657 return false;
1658 }
1659 }
1660 return true;
1661 }
1662
1663 bool HInstructionBuilder::IsInitialized(ObjPtr<mirror::Class> cls) const {
1664 if (cls == nullptr) {
1665 return false;
1666 }
1667
1668 // Check if the class will be initialized at runtime.
1669 if (cls->IsInitialized()) {
1670 const CompilerOptions& compiler_options = code_generator_->GetCompilerOptions();
1671 if (compiler_options.IsAotCompiler()) {
1672 // Assume loaded only if klass is in the boot or app image.
1673 if (IsInImage(cls, compiler_options)) {
1674 return true;
1675 }
1676 } else {
1677 DCHECK(compiler_options.IsJitCompiler());
1678 if (Runtime::Current()->GetJit()->CanAssumeInitialized(
1679 cls,
1680 compiler_options.IsJitCompilerForSharedCode())) {
1681 // For JIT, the class cannot revert to an uninitialized state.
1682 return true;
1683 }
1684 }
1685 }
1686
1687 // We can avoid the class initialization check for `cls` in static methods and constructors
1688 // in the very same class; invoking a static method involves a class initialization check
1689 // and so does the instance allocation that must be executed before invoking a constructor.
1690 // Other instance methods of the same class can run on an escaped instance
1691 // of an erroneous class. Even a superclass may need to be checked as the subclass
1692 // can be completely initialized while the superclass is initializing and the subclass
1693 // remains initialized when the superclass initializer throws afterwards. b/62478025
1694 // Note: The HClinitCheck+HInvokeStaticOrDirect merging can still apply.
1695 auto is_static_method_or_constructor_of_cls = [cls](const DexCompilationUnit& compilation_unit)
1696 REQUIRES_SHARED(Locks::mutator_lock_) {
1697 return (compilation_unit.GetAccessFlags() & (kAccStatic | kAccConstructor)) != 0u &&
1698 compilation_unit.GetCompilingClass().Get() == cls;
1699 };
1700 if (is_static_method_or_constructor_of_cls(*outer_compilation_unit_) ||
1701 // Check also the innermost method. Though excessive copies of ClinitCheck can be
1702 // eliminated by GVN, that happens only after the decision whether to inline the
1703 // graph or not and that may depend on the presence of the ClinitCheck.
1704 // TODO: We should walk over the entire inlined method chain, but we don't pass that
1705 // information to the builder.
1706 is_static_method_or_constructor_of_cls(*dex_compilation_unit_)) {
1707 return true;
1708 }
1709
1710 // Otherwise, we may be able to avoid the check if `cls` is a superclass of a method being
1711 // compiled here (anywhere in the inlining chain) as the `cls` must have started initializing
1712 // before calling any `cls` or subclass methods. Static methods require a clinit check and
1713 // instance methods require an instance which cannot be created before doing a clinit check.
1714 // When a subclass of `cls` starts initializing, it starts initializing its superclass
1715 // chain up to `cls` without running any bytecode, i.e. without any opportunity for circular
1716 // initialization weirdness.
1717 //
1718 // If the initialization of `cls` is trivial (`cls` and its superclasses and superinterfaces
1719 // with default methods initialize only their own static fields using constant values), it must
1720 // complete, either successfully or by throwing and marking `cls` erroneous, without allocating
1721 // any instances of `cls` or subclasses (or any other class) and without calling any methods.
1722 // If it completes by throwing, no instances of `cls` shall be created and no subclass method
1723 // bytecode shall execute (see above), therefore the instruction we're building shall be
1724 // unreachable. By reaching the instruction, we know that `cls` was initialized successfully.
1725 //
1726 // TODO: We should walk over the entire inlined methods chain, but we don't pass that
1727 // information to the builder. (We could also check if we're guaranteed a non-null instance
1728 // of `cls` at this location but that's outside the scope of the instruction builder.)
1729 bool is_subclass = IsSubClass(outer_compilation_unit_->GetCompilingClass().Get(), cls);
1730 if (dex_compilation_unit_ != outer_compilation_unit_) {
1731 is_subclass = is_subclass ||
1732 IsSubClass(dex_compilation_unit_->GetCompilingClass().Get(), cls);
1733 }
1734 if (is_subclass && HasTrivialInitialization(cls, code_generator_->GetCompilerOptions())) {
1735 return true;
1736 }
1737
1738 return false;
1739 }
1740
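// Illustration (not from the original source): a case where IsInitialized() above lets us
// omit an HClinitCheck, for a hypothetical class C:
//
//   class C {
//     static int f;
//     static void m() { f = 1; }  // sput inside a static method of C itself: invoking
//   }                             //   C.m() already required C to be initialized.
//
// The same sput compiled in an unrelated class typically still needs an HClinitCheck,
// unless C is known to be initialized in the boot/app image (AOT), the JIT can assume it,
// or the trivial-initialization subclass case above applies.
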
1741 HClinitCheck* HInstructionBuilder::ProcessClinitCheckForInvoke(
1742 uint32_t dex_pc,
1743 ArtMethod* resolved_method,
1744 HInvokeStaticOrDirect::ClinitCheckRequirement* clinit_check_requirement) {
1745 ScopedObjectAccess soa(Thread::Current());
1746 ObjPtr<mirror::Class> klass = resolved_method->GetDeclaringClass();
1747
1748 HClinitCheck* clinit_check = nullptr;
1749 if (IsInitialized(klass)) {
1750 *clinit_check_requirement = HInvokeStaticOrDirect::ClinitCheckRequirement::kNone;
1751 } else {
1752 Handle<mirror::Class> h_klass = graph_->GetHandleCache()->NewHandle(klass);
1753 HLoadClass* cls = BuildLoadClass(h_klass->GetDexTypeIndex(),
1754 h_klass->GetDexFile(),
1755 h_klass,
1756 dex_pc,
1757 /* needs_access_check= */ false);
1758 if (cls != nullptr) {
1759 *clinit_check_requirement = HInvokeStaticOrDirect::ClinitCheckRequirement::kExplicit;
1760 clinit_check = new (allocator_) HClinitCheck(cls, dex_pc);
1761 AppendInstruction(clinit_check);
1762 } else {
1763 // Let the invoke handle this with an implicit class initialization check.
1764 *clinit_check_requirement = HInvokeStaticOrDirect::ClinitCheckRequirement::kImplicit;
1765 }
1766 }
1767 return clinit_check;
1768 }
1769
1770 bool HInstructionBuilder::SetupInvokeArguments(HInstruction* invoke,
1771 const InstructionOperands& operands,
1772 const char* shorty,
1773 ReceiverArg receiver_arg) {
1774 // Note: The `invoke` can be an intrinsic replacement, so it is not necessarily an HInvoke.
1775 // In that case, do not log errors, they shall be reported when we try to build the HInvoke.
1776 uint32_t shorty_index = 1; // Skip the return type.
1777 const size_t number_of_operands = operands.GetNumberOfOperands();
1778 bool argument_length_error = false;
1779
1780 size_t start_index = 0u;
1781 size_t argument_index = 0u;
1782 if (receiver_arg != ReceiverArg::kNone) {
1783 if (number_of_operands == 0u) {
1784 argument_length_error = true;
1785 } else {
1786 start_index = 1u;
1787 if (receiver_arg != ReceiverArg::kIgnored) {
1788 uint32_t obj_reg = operands.GetOperand(0u);
1789 HInstruction* arg = (receiver_arg == ReceiverArg::kPlainArg)
1790 ? LoadLocal(obj_reg, DataType::Type::kReference)
1791 : LoadNullCheckedLocal(obj_reg, invoke->GetDexPc());
1792 if (receiver_arg != ReceiverArg::kNullCheckedOnly) {
1793 invoke->SetRawInputAt(0u, arg);
1794 argument_index = 1u;
1795 }
1796 }
1797 }
1798 }
1799
1800 for (size_t i = start_index; i < number_of_operands; ++i, ++argument_index) {
1801 // Make sure we don't go over the expected arguments or over the number of
1802 // dex registers given. If the instruction was seen as dead by the verifier,
1803 // it hasn't been properly checked.
1804 if (UNLIKELY(shorty[shorty_index] == 0)) {
1805 argument_length_error = true;
1806 break;
1807 }
1808 DataType::Type type = DataType::FromShorty(shorty[shorty_index++]);
1809 bool is_wide = (type == DataType::Type::kInt64) || (type == DataType::Type::kFloat64);
1810 if (is_wide && ((i + 1 == number_of_operands) ||
1811 (operands.GetOperand(i) + 1 != operands.GetOperand(i + 1)))) {
1812 if (invoke->IsInvoke()) {
1813 // Longs and doubles should be in pairs, that is, sequential registers. The verifier should
1814 // reject any class where this is violated. However, the verifier only does these checks
1815 // on non-trivially-dead instructions, so we just bail out of the compilation.
1816 VLOG(compiler) << "Did not compile "
1817 << dex_file_->PrettyMethod(dex_compilation_unit_->GetDexMethodIndex())
1818 << " because of non-sequential dex register pair in wide argument";
1819 MaybeRecordStat(compilation_stats_,
1820 MethodCompilationStat::kNotCompiledMalformedOpcode);
1821 }
1822 return false;
1823 }
1824 HInstruction* arg = LoadLocal(operands.GetOperand(i), type);
1825 DCHECK(invoke->InputAt(argument_index) == nullptr);
1826 invoke->SetRawInputAt(argument_index, arg);
1827 if (is_wide) {
1828 ++i;
1829 }
1830 }
1831
1832 argument_length_error = argument_length_error || shorty[shorty_index] != 0;
1833 if (argument_length_error) {
1834 if (invoke->IsInvoke()) {
1835 VLOG(compiler) << "Did not compile "
1836 << dex_file_->PrettyMethod(dex_compilation_unit_->GetDexMethodIndex())
1837 << " because of wrong number of arguments in invoke instruction";
1838 MaybeRecordStat(compilation_stats_,
1839 MethodCompilationStat::kNotCompiledMalformedOpcode);
1840 }
1841 return false;
1842 }
1843
1844 if (invoke->IsInvokeStaticOrDirect() &&
1845 HInvokeStaticOrDirect::NeedsCurrentMethodInput(
1846 invoke->AsInvokeStaticOrDirect()->GetDispatchInfo())) {
1847 DCHECK_EQ(argument_index, invoke->AsInvokeStaticOrDirect()->GetCurrentMethodIndex());
1848 DCHECK(invoke->InputAt(argument_index) == nullptr);
1849 invoke->SetRawInputAt(argument_index, graph_->GetCurrentMethod());
1850 }
1851
1852 if (invoke->IsInvokeInterface() &&
1853 (invoke->AsInvokeInterface()->GetHiddenArgumentLoadKind() == MethodLoadKind::kRecursive)) {
1854 invoke->SetRawInputAt(invoke->AsInvokeInterface()->GetNumberOfArguments() - 1,
1855 graph_->GetCurrentMethod());
1856 }
1857
1858 return true;
1859 }
1860
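// Illustration (not from the original source): wide arguments occupy a dex register pair.
// For a call to a hypothetical `static long add(int a, long b)`, the dex instruction
// `invoke-static {v0, v1, v2}` passes v0 as the int and the sequential pair (v1, v2) as the
// long; the loop above consumes both registers for one kInt64 input. A non-sequential pair
// can only appear in code the verifier treated as dead, so the builder bails out instead of
// generating HIR for it.
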
1861 bool HInstructionBuilder::HandleInvoke(HInvoke* invoke,
1862 const InstructionOperands& operands,
1863 const char* shorty,
1864 bool is_unresolved) {
1865 DCHECK_IMPLIES(invoke->IsInvokeStaticOrDirect(),
1866 !invoke->AsInvokeStaticOrDirect()->IsStringInit());
1867
1868 ReceiverArg receiver_arg = (invoke->GetInvokeType() == InvokeType::kStatic)
1869 ? ReceiverArg::kNone
1870 : (is_unresolved ? ReceiverArg::kPlainArg : ReceiverArg::kNullCheckedArg);
1871 if (!SetupInvokeArguments(invoke, operands, shorty, receiver_arg)) {
1872 return false;
1873 }
1874
1875 AppendInstruction(invoke);
1876 latest_result_ = invoke;
1877
1878 return true;
1879 }
1880
1881 bool HInstructionBuilder::BuildSimpleIntrinsic(ArtMethod* method,
1882 uint32_t dex_pc,
1883 const InstructionOperands& operands,
1884 const char* shorty) {
1885 Intrinsics intrinsic = static_cast<Intrinsics>(method->GetIntrinsic());
1886 DCHECK_NE(intrinsic, Intrinsics::kNone);
1887 constexpr DataType::Type kInt32 = DataType::Type::kInt32;
1888 constexpr DataType::Type kInt64 = DataType::Type::kInt64;
1889 constexpr DataType::Type kFloat32 = DataType::Type::kFloat32;
1890 constexpr DataType::Type kFloat64 = DataType::Type::kFloat64;
1891 ReceiverArg receiver_arg = method->IsStatic() ? ReceiverArg::kNone : ReceiverArg::kNullCheckedArg;
1892 HInstruction* instruction = nullptr;
1893 switch (intrinsic) {
1894 case Intrinsics::kIntegerRotateRight:
1895 case Intrinsics::kIntegerRotateLeft:
1896 // For rotate left, we negate the distance below.
1897 instruction = new (allocator_) HRor(kInt32, /*value=*/ nullptr, /*distance=*/ nullptr);
1898 break;
1899 case Intrinsics::kLongRotateRight:
1900 case Intrinsics::kLongRotateLeft:
1901 // For rotate left, we negate the distance below.
1902 instruction = new (allocator_) HRor(kInt64, /*value=*/ nullptr, /*distance=*/ nullptr);
1903 break;
1904 case Intrinsics::kIntegerCompare:
1905 instruction = new (allocator_) HCompare(
1906 kInt32, /*first=*/ nullptr, /*second=*/ nullptr, ComparisonBias::kNoBias, dex_pc);
1907 break;
1908 case Intrinsics::kLongCompare:
1909 instruction = new (allocator_) HCompare(
1910 kInt64, /*first=*/ nullptr, /*second=*/ nullptr, ComparisonBias::kNoBias, dex_pc);
1911 break;
1912 case Intrinsics::kIntegerSignum:
1913 instruction = new (allocator_) HCompare(
1914 kInt32, /*first=*/ nullptr, graph_->GetIntConstant(0), ComparisonBias::kNoBias, dex_pc);
1915 break;
1916 case Intrinsics::kLongSignum:
1917 instruction = new (allocator_) HCompare(
1918 kInt64, /*first=*/ nullptr, graph_->GetLongConstant(0), ComparisonBias::kNoBias, dex_pc);
1919 break;
1920 case Intrinsics::kFloatIsNaN:
1921 case Intrinsics::kDoubleIsNaN: {
1922 // IsNaN(x) is the same as x != x.
1923 instruction = new (allocator_) HNotEqual(/*first=*/ nullptr, /*second=*/ nullptr, dex_pc);
1924 instruction->AsCondition()->SetBias(ComparisonBias::kLtBias);
1925 break;
1926 }
1927 case Intrinsics::kStringCharAt:
1928 // We treat String as an array to allow DCE and BCE to seamlessly work on strings.
1929 instruction = new (allocator_) HArrayGet(/*array=*/ nullptr,
1930 /*index=*/ nullptr,
1931 DataType::Type::kUint16,
1932 SideEffects::None(), // Strings are immutable.
1933 dex_pc,
1934 /*is_string_char_at=*/ true);
1935 break;
1936 case Intrinsics::kStringIsEmpty:
1937 case Intrinsics::kStringLength:
1938 // We treat String as an array to allow DCE and BCE to seamlessly work on strings.
1939 // For String.isEmpty(), we add a comparison with 0 below.
1940 instruction =
1941 new (allocator_) HArrayLength(/*array=*/ nullptr, dex_pc, /* is_string_length= */ true);
1942 break;
1943 case Intrinsics::kUnsafeLoadFence:
1944 case Intrinsics::kJdkUnsafeLoadFence:
1945 receiver_arg = ReceiverArg::kNullCheckedOnly;
1946 instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kLoadAny, dex_pc);
1947 break;
1948 case Intrinsics::kUnsafeStoreFence:
1949 case Intrinsics::kJdkUnsafeStoreFence:
1950 receiver_arg = ReceiverArg::kNullCheckedOnly;
1951 instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kAnyStore, dex_pc);
1952 break;
1953 case Intrinsics::kUnsafeFullFence:
1954 case Intrinsics::kJdkUnsafeFullFence:
1955 receiver_arg = ReceiverArg::kNullCheckedOnly;
1956 instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kAnyAny, dex_pc);
1957 break;
1958 case Intrinsics::kVarHandleFullFence:
1959 instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kAnyAny, dex_pc);
1960 break;
1961 case Intrinsics::kVarHandleAcquireFence:
1962 instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kLoadAny, dex_pc);
1963 break;
1964 case Intrinsics::kVarHandleReleaseFence:
1965 instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kAnyStore, dex_pc);
1966 break;
1967 case Intrinsics::kVarHandleLoadLoadFence:
1968 instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kLoadAny, dex_pc);
1969 break;
1970 case Intrinsics::kVarHandleStoreStoreFence:
1971 instruction = new (allocator_) HMemoryBarrier(MemBarrierKind::kStoreStore, dex_pc);
1972 break;
1973 case Intrinsics::kMathMinIntInt:
1974 instruction = new (allocator_) HMin(kInt32, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
1975 break;
1976 case Intrinsics::kMathMinLongLong:
1977 instruction = new (allocator_) HMin(kInt64, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
1978 break;
1979 case Intrinsics::kMathMinFloatFloat:
1980 instruction = new (allocator_) HMin(kFloat32, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
1981 break;
1982 case Intrinsics::kMathMinDoubleDouble:
1983 instruction = new (allocator_) HMin(kFloat64, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
1984 break;
1985 case Intrinsics::kMathMaxIntInt:
1986 instruction = new (allocator_) HMax(kInt32, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
1987 break;
1988 case Intrinsics::kMathMaxLongLong:
1989 instruction = new (allocator_) HMax(kInt64, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
1990 break;
1991 case Intrinsics::kMathMaxFloatFloat:
1992 instruction = new (allocator_) HMax(kFloat32, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
1993 break;
1994 case Intrinsics::kMathMaxDoubleDouble:
1995 instruction = new (allocator_) HMax(kFloat64, /*left=*/ nullptr, /*right=*/ nullptr, dex_pc);
1996 break;
1997 case Intrinsics::kMathAbsInt:
1998 instruction = new (allocator_) HAbs(kInt32, /*input=*/ nullptr, dex_pc);
1999 break;
2000 case Intrinsics::kMathAbsLong:
2001 instruction = new (allocator_) HAbs(kInt64, /*input=*/ nullptr, dex_pc);
2002 break;
2003 case Intrinsics::kMathAbsFloat:
2004 instruction = new (allocator_) HAbs(kFloat32, /*input=*/ nullptr, dex_pc);
2005 break;
2006 case Intrinsics::kMathAbsDouble:
2007 instruction = new (allocator_) HAbs(kFloat64, /*input=*/ nullptr, dex_pc);
2008 break;
2009 default:
2010 // We do not have intermediate representation for other intrinsics.
2011 DCHECK(!IsIntrinsicWithSpecializedHir(intrinsic));
2012 return false;
2013 }
2014 DCHECK(instruction != nullptr);
2015 if (!SetupInvokeArguments(instruction, operands, shorty, receiver_arg)) {
2016 return false;
2017 }
2018
2019 switch (intrinsic) {
2020 case Intrinsics::kIntegerRotateLeft:
2021 case Intrinsics::kLongRotateLeft: {
2022 // Negate the distance value for rotate left.
2023 DCHECK(instruction->IsRor());
2024 HNeg* neg = new (allocator_) HNeg(kInt32, instruction->InputAt(1u));
2025 AppendInstruction(neg);
2026 instruction->SetRawInputAt(1u, neg);
2027 break;
2028 }
2029 case Intrinsics::kFloatIsNaN:
2030 case Intrinsics::kDoubleIsNaN:
2031 // Set the second input to be the same as first.
2032 DCHECK(instruction->IsNotEqual());
2033 DCHECK(instruction->InputAt(1u) == nullptr);
2034 instruction->SetRawInputAt(1u, instruction->InputAt(0u));
2035 break;
2036 case Intrinsics::kStringCharAt: {
2037 // Add bounds check.
2038 HInstruction* array = instruction->InputAt(0u);
2039 HInstruction* index = instruction->InputAt(1u);
2040 HInstruction* length =
2041 new (allocator_) HArrayLength(array, dex_pc, /*is_string_length=*/ true);
2042 AppendInstruction(length);
2043 HBoundsCheck* bounds_check =
2044 new (allocator_) HBoundsCheck(index, length, dex_pc, /*is_string_char_at=*/ true);
2045 AppendInstruction(bounds_check);
2046 graph_->SetHasBoundsChecks(true);
2047 instruction->SetRawInputAt(1u, bounds_check);
2048 break;
2049 }
2050 case Intrinsics::kStringIsEmpty: {
2051 // Compare the length with 0.
2052 DCHECK(instruction->IsArrayLength());
2053 AppendInstruction(instruction);
2054 HEqual* equal = new (allocator_) HEqual(instruction, graph_->GetIntConstant(0), dex_pc);
2055 instruction = equal;
2056 break;
2057 }
2058 default:
2059 break;
2060 }
2061
2062 AppendInstruction(instruction);
2063 latest_result_ = instruction;
2064
2065 return true;
2066 }
2067
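// Illustration (not from the original source): some of the rewrites above, in terms of the
// HIR that gets emitted:
//
//   Integer.rotateLeft(x, d)  ->  HRor(x, HNeg(d))      // rotl(x, d) == rotr(x, -d).
//   Float.isNaN(x)            ->  HNotEqual(x, x)       // NaN is the only value != itself.
//   s.isEmpty()               ->  HEqual(HArrayLength(s), 0)
//   s.charAt(i)               ->  HArrayGet(s, HBoundsCheck(i, HArrayLength(s)))
//
// Lowering these intrinsics to plain HIR lets later passes (GVN, BCE, DCE) optimize them
// like any other instruction.
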
2068 bool HInstructionBuilder::HandleStringInit(HInvoke* invoke,
2069 const InstructionOperands& operands,
2070 const char* shorty) {
2071 DCHECK(invoke->IsInvokeStaticOrDirect());
2072 DCHECK(invoke->AsInvokeStaticOrDirect()->IsStringInit());
2073
2074 if (!SetupInvokeArguments(invoke, operands, shorty, ReceiverArg::kIgnored)) {
2075 return false;
2076 }
2077
2078 AppendInstruction(invoke);
2079
2080 // This is a StringFactory call, not an actual String constructor. Its result
2081 // replaces the empty String pre-allocated by NewInstance.
2082 uint32_t orig_this_reg = operands.GetOperand(0);
2083 HInstruction* arg_this = LoadLocal(orig_this_reg, DataType::Type::kReference);
2084
2085 // Replacing the NewInstance might render it redundant. Keep a list of these
2086 // to be visited once it is clear whether it has remaining uses.
2087 if (arg_this->IsNewInstance()) {
2088 ssa_builder_->AddUninitializedString(arg_this->AsNewInstance());
2089 } else {
2090 DCHECK(arg_this->IsPhi());
2091 // We can get a phi as input of a String.<init> if there is a loop between the
2092 // allocation and the String.<init> call. As we don't know which other phis might alias
2093 // with `arg_this`, we keep a record of those invocations so we can later replace
2094 // the allocation with the invocation.
2095 // Add the actual 'this' input so the analysis knows which instruction is the allocation.
2096 // The input will be removed during the analysis.
2097 invoke->AddInput(arg_this);
2098 ssa_builder_->AddUninitializedStringPhi(invoke);
2099 }
2100 // Walk over all vregs and replace any occurrence of `arg_this` with `invoke`.
2101 for (size_t vreg = 0, e = current_locals_->size(); vreg < e; ++vreg) {
2102 if ((*current_locals_)[vreg] == arg_this) {
2103 (*current_locals_)[vreg] = invoke;
2104 }
2105 }
2106 return true;
2107 }
2108
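// Illustration (not from the original source): for Java code like `String s = new String(chars);`
// the dex bytecode allocates an empty String with new-instance and then calls
// String.<init>(char[]). The code above models that call as a StringFactory invoke and
// rewrites every local that held the placeholder allocation to hold the invoke's result, so
// the HNewInstance usually ends up with no uses and is removed later by the SsaBuilder.
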
2109 static DataType::Type GetFieldAccessType(const DexFile& dex_file, uint16_t field_index) {
2110 const dex::FieldId& field_id = dex_file.GetFieldId(field_index);
2111 const char* type = dex_file.GetFieldTypeDescriptor(field_id);
2112 return DataType::FromShorty(type[0]);
2113 }
2114
2115 bool HInstructionBuilder::BuildInstanceFieldAccess(const Instruction& instruction,
2116 uint32_t dex_pc,
2117 bool is_put) {
2118 uint32_t source_or_dest_reg = instruction.VRegA_22c();
2119 uint32_t obj_reg = instruction.VRegB_22c();
2120 uint16_t field_index = instruction.VRegC_22c();
2121
2122 ScopedObjectAccess soa(Thread::Current());
2123 ArtField* resolved_field = ResolveField(field_index, /* is_static= */ false, is_put);
2124
2125 // Generate an explicit null check on the reference, unless the field access
2126 // is unresolved. In that case, we rely on the runtime to perform various
2127 // checks first, followed by a null check.
2128 HInstruction* object = (resolved_field == nullptr)
2129 ? LoadLocal(obj_reg, DataType::Type::kReference)
2130 : LoadNullCheckedLocal(obj_reg, dex_pc);
2131
2132 DataType::Type field_type = GetFieldAccessType(*dex_file_, field_index);
2133 if (is_put) {
2134 HInstruction* value = LoadLocal(source_or_dest_reg, field_type);
2135 HInstruction* field_set = nullptr;
2136 if (resolved_field == nullptr) {
2137 MaybeRecordStat(compilation_stats_,
2138 MethodCompilationStat::kUnresolvedField);
2139 field_set = new (allocator_) HUnresolvedInstanceFieldSet(object,
2140 value,
2141 field_type,
2142 field_index,
2143 dex_pc);
2144 } else {
2145 uint16_t class_def_index = resolved_field->GetDeclaringClass()->GetDexClassDefIndex();
2146 field_set = new (allocator_) HInstanceFieldSet(object,
2147 value,
2148 resolved_field,
2149 field_type,
2150 resolved_field->GetOffset(),
2151 resolved_field->IsVolatile(),
2152 field_index,
2153 class_def_index,
2154 *dex_file_,
2155 dex_pc);
2156 }
2157 AppendInstruction(field_set);
2158 } else {
2159 HInstruction* field_get = nullptr;
2160 if (resolved_field == nullptr) {
2161 MaybeRecordStat(compilation_stats_,
2162 MethodCompilationStat::kUnresolvedField);
2163 field_get = new (allocator_) HUnresolvedInstanceFieldGet(object,
2164 field_type,
2165 field_index,
2166 dex_pc);
2167 } else {
2168 uint16_t class_def_index = resolved_field->GetDeclaringClass()->GetDexClassDefIndex();
2169 field_get = new (allocator_) HInstanceFieldGet(object,
2170 resolved_field,
2171 field_type,
2172 resolved_field->GetOffset(),
2173 resolved_field->IsVolatile(),
2174 field_index,
2175 class_def_index,
2176 *dex_file_,
2177 dex_pc);
2178 }
2179 AppendInstruction(field_get);
2180 UpdateLocal(source_or_dest_reg, field_get);
2181 }
2182
2183 return true;
2184 }
2185
2186 void HInstructionBuilder::BuildUnresolvedStaticFieldAccess(const Instruction& instruction,
2187 uint32_t dex_pc,
2188 bool is_put,
2189 DataType::Type field_type) {
2190 uint32_t source_or_dest_reg = instruction.VRegA_21c();
2191 uint16_t field_index = instruction.VRegB_21c();
2192
2193 if (is_put) {
2194 HInstruction* value = LoadLocal(source_or_dest_reg, field_type);
2195 AppendInstruction(
2196 new (allocator_) HUnresolvedStaticFieldSet(value, field_type, field_index, dex_pc));
2197 } else {
2198 AppendInstruction(new (allocator_) HUnresolvedStaticFieldGet(field_type, field_index, dex_pc));
2199 UpdateLocal(source_or_dest_reg, current_block_->GetLastInstruction());
2200 }
2201 }
2202
2203 ArtField* HInstructionBuilder::ResolveField(uint16_t field_idx, bool is_static, bool is_put) {
2204 ScopedObjectAccess soa(Thread::Current());
2205
2206 ClassLinker* class_linker = dex_compilation_unit_->GetClassLinker();
2207 Handle<mirror::ClassLoader> class_loader = dex_compilation_unit_->GetClassLoader();
2208
2209 ArtField* resolved_field = class_linker->ResolveFieldJLS(field_idx,
2210 dex_compilation_unit_->GetDexCache(),
2211 class_loader);
2212 DCHECK_EQ(resolved_field == nullptr, soa.Self()->IsExceptionPending())
2213 << "field="
2214 << ((resolved_field == nullptr) ? "null" : resolved_field->PrettyField())
2215 << ", exception="
2216 << (soa.Self()->IsExceptionPending() ? soa.Self()->GetException()->Dump() : "null");
2217 if (UNLIKELY(resolved_field == nullptr)) {
2218 // Clean up any exception left by field resolution.
2219 soa.Self()->ClearException();
2220 return nullptr;
2221 }
2222
2223 if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
2224 return nullptr;
2225 }
2226
2227 // Check access.
2228 Handle<mirror::Class> compiling_class = dex_compilation_unit_->GetCompilingClass();
2229 if (compiling_class == nullptr) {
2230 // Check if the declaring class or referencing class is accessible.
2231 SamePackageCompare same_package(*dex_compilation_unit_);
2232 ObjPtr<mirror::Class> declaring_class = resolved_field->GetDeclaringClass();
2233 bool declaring_class_accessible = declaring_class->IsPublic() || same_package(declaring_class);
2234 if (!declaring_class_accessible) {
2235 // It is possible to access members from an inaccessible superclass
2236 // by referencing them through an accessible subclass.
2237 ObjPtr<mirror::Class> referenced_class = class_linker->LookupResolvedType(
2238 dex_compilation_unit_->GetDexFile()->GetFieldId(field_idx).class_idx_,
2239 dex_compilation_unit_->GetDexCache().Get(),
2240 class_loader.Get());
2241 DCHECK(referenced_class != nullptr); // Must have been resolved when resolving the field.
2242 if (!referenced_class->IsPublic() && !same_package(referenced_class)) {
2243 return nullptr;
2244 }
2245 }
2246 // Check whether the field itself is accessible.
2247 // Since the referrer is unresolved but the field is resolved, it cannot be
2248 // inside the same class, so a private field is known to be inaccessible.
2249 // And without a resolved referrer, we cannot check for protected member access
2250 // in a superclass, so we only allow access to public members or members within the package.
2251 if (resolved_field->IsPrivate() ||
2252 (!resolved_field->IsPublic() && !declaring_class_accessible)) {
2253 return nullptr;
2254 }
2255 } else if (!compiling_class->CanAccessResolvedField(resolved_field->GetDeclaringClass(),
2256 resolved_field,
2257 dex_compilation_unit_->GetDexCache().Get(),
2258 field_idx)) {
2259 return nullptr;
2260 }
2261
2262 if (is_put) {
2263 if (resolved_field->IsFinal() &&
2264 (compiling_class.Get() != resolved_field->GetDeclaringClass())) {
2265 // Final fields can only be updated within their own class.
2266 // TODO: Only allow it in constructors. b/34966607.
2267 return nullptr;
2268 }
2269
2270 // Note: We do not need to resolve the field type for `get` opcodes.
2271 StackArtFieldHandleScope<1> rhs(soa.Self());
2272 ReflectiveHandle<ArtField> resolved_field_handle(rhs.NewHandle(resolved_field));
2273 if (resolved_field->ResolveType().IsNull()) {
2274 // ArtField::ResolveType() may fail as evidenced with a dexing bug (b/78788577).
2275 soa.Self()->ClearException();
2276 return nullptr; // Failure
2277 }
2278 resolved_field = resolved_field_handle.Get();
2279 }
2280
2281 return resolved_field;
2282 }
2283
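// Illustration (not from the original source): the "inaccessible superclass" case above
// corresponds to Java code such as (hypothetical classes in package p):
//
//   class A { public static int f; }     // package-private declaring class.
//   public class B extends A {}          // public subclass.
//
// From another package, javac emits the field reference as B.f even though the field lives
// in A. The declaring class A is not accessible there, but the referenced class B is, so the
// access is allowed as long as the field itself is public.
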
2284 void HInstructionBuilder::BuildStaticFieldAccess(const Instruction& instruction,
2285 uint32_t dex_pc,
2286 bool is_put) {
2287 uint32_t source_or_dest_reg = instruction.VRegA_21c();
2288 uint16_t field_index = instruction.VRegB_21c();
2289
2290 ScopedObjectAccess soa(Thread::Current());
2291 ArtField* resolved_field = ResolveField(field_index, /* is_static= */ true, is_put);
2292
2293 if (resolved_field == nullptr) {
2294 MaybeRecordStat(compilation_stats_,
2295 MethodCompilationStat::kUnresolvedField);
2296 DataType::Type field_type = GetFieldAccessType(*dex_file_, field_index);
2297 BuildUnresolvedStaticFieldAccess(instruction, dex_pc, is_put, field_type);
2298 return;
2299 }
2300
2301 DataType::Type field_type = GetFieldAccessType(*dex_file_, field_index);
2302
2303 Handle<mirror::Class> klass =
2304 graph_->GetHandleCache()->NewHandle(resolved_field->GetDeclaringClass());
2305 HLoadClass* constant = BuildLoadClass(klass->GetDexTypeIndex(),
2306 klass->GetDexFile(),
2307 klass,
2308 dex_pc,
2309 /* needs_access_check= */ false);
2310
2311 if (constant == nullptr) {
2312 // The class cannot be referenced from this compiled code. Generate
2313 // an unresolved access.
2314 MaybeRecordStat(compilation_stats_,
2315 MethodCompilationStat::kUnresolvedFieldNotAFastAccess);
2316 BuildUnresolvedStaticFieldAccess(instruction, dex_pc, is_put, field_type);
2317 return;
2318 }
2319
2320 HInstruction* cls = constant;
2321 if (!IsInitialized(klass.Get())) {
2322 cls = new (allocator_) HClinitCheck(constant, dex_pc);
2323 AppendInstruction(cls);
2324 }
2325
2326 uint16_t class_def_index = klass->GetDexClassDefIndex();
2327 if (is_put) {
2328 // We need to keep the class alive before loading the value.
2329 HInstruction* value = LoadLocal(source_or_dest_reg, field_type);
2330 DCHECK_EQ(HPhi::ToPhiType(value->GetType()), HPhi::ToPhiType(field_type));
2331 AppendInstruction(new (allocator_) HStaticFieldSet(cls,
2332 value,
2333 resolved_field,
2334 field_type,
2335 resolved_field->GetOffset(),
2336 resolved_field->IsVolatile(),
2337 field_index,
2338 class_def_index,
2339 *dex_file_,
2340 dex_pc));
2341 } else {
2342 AppendInstruction(new (allocator_) HStaticFieldGet(cls,
2343 resolved_field,
2344 field_type,
2345 resolved_field->GetOffset(),
2346 resolved_field->IsVolatile(),
2347 field_index,
2348 class_def_index,
2349 *dex_file_,
2350 dex_pc));
2351 UpdateLocal(source_or_dest_reg, current_block_->GetLastInstruction());
2352 }
2353 }
2354
2355 void HInstructionBuilder::BuildCheckedDivRem(uint16_t out_vreg,
2356 uint16_t first_vreg,
2357 int64_t second_vreg_or_constant,
2358 uint32_t dex_pc,
2359 DataType::Type type,
2360 bool second_is_constant,
2361 bool isDiv) {
2362 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
2363
2364 HInstruction* first = LoadLocal(first_vreg, type);
2365 HInstruction* second = nullptr;
2366 if (second_is_constant) {
2367 if (type == DataType::Type::kInt32) {
2368 second = graph_->GetIntConstant(second_vreg_or_constant, dex_pc);
2369 } else {
2370 second = graph_->GetLongConstant(second_vreg_or_constant, dex_pc);
2371 }
2372 } else {
2373 second = LoadLocal(second_vreg_or_constant, type);
2374 }
2375
2376 if (!second_is_constant ||
2377 (type == DataType::Type::kInt32 && second->AsIntConstant()->GetValue() == 0) ||
2378 (type == DataType::Type::kInt64 && second->AsLongConstant()->GetValue() == 0)) {
2379 second = new (allocator_) HDivZeroCheck(second, dex_pc);
2380 AppendInstruction(second);
2381 }
2382
2383 if (isDiv) {
2384 AppendInstruction(new (allocator_) HDiv(type, first, second, dex_pc));
2385 } else {
2386 AppendInstruction(new (allocator_) HRem(type, first, second, dex_pc));
2387 }
2388 UpdateLocal(out_vreg, current_block_->GetLastInstruction());
2389 }
2390
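// Illustration (not from the original source): HIR emitted by BuildCheckedDivRem() for
// int division:
//
//   a / b  ->  HDiv(a, HDivZeroCheck(b))  // non-constant divisor: zero check needed.
//   a / 7  ->  HDiv(a, 7)                 // non-zero constant: no zero check.
//   a / 0  ->  HDiv(a, HDivZeroCheck(0))  // constant zero: check kept, always throws.
//
// The rem-* opcodes produce the same shape with HRem instead of HDiv.
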
2391 void HInstructionBuilder::BuildArrayAccess(const Instruction& instruction,
2392 uint32_t dex_pc,
2393 bool is_put,
2394 DataType::Type anticipated_type) {
2395 uint8_t source_or_dest_reg = instruction.VRegA_23x();
2396 uint8_t array_reg = instruction.VRegB_23x();
2397 uint8_t index_reg = instruction.VRegC_23x();
2398
2399 HInstruction* object = LoadNullCheckedLocal(array_reg, dex_pc);
2400 HInstruction* length = new (allocator_) HArrayLength(object, dex_pc);
2401 AppendInstruction(length);
2402 HInstruction* index = LoadLocal(index_reg, DataType::Type::kInt32);
2403 index = new (allocator_) HBoundsCheck(index, length, dex_pc);
2404 AppendInstruction(index);
2405 if (is_put) {
2406 HInstruction* value = LoadLocal(source_or_dest_reg, anticipated_type);
2407 // TODO: Insert a type check node if the type is Object.
2408 HArraySet* aset = new (allocator_) HArraySet(object, index, value, anticipated_type, dex_pc);
2409 ssa_builder_->MaybeAddAmbiguousArraySet(aset);
2410 AppendInstruction(aset);
2411 } else {
2412 HArrayGet* aget = new (allocator_) HArrayGet(object, index, anticipated_type, dex_pc);
2413 ssa_builder_->MaybeAddAmbiguousArrayGet(aget);
2414 AppendInstruction(aget);
2415 UpdateLocal(source_or_dest_reg, current_block_->GetLastInstruction());
2416 }
2417 graph_->SetHasBoundsChecks(true);
2418 }
2419
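// Illustration (not from the original source): an aget such as `x = a[i]` roughly becomes
//
//   null_checked = HNullCheck(a)
//   length       = HArrayLength(null_checked)
//   index        = HBoundsCheck(i, length)
//   x            = HArrayGet(null_checked, index, anticipated_type)
//
// The element type can still be ambiguous here (e.g. aget is int vs. float), which is why
// the get/set is registered with the SsaBuilder for later type resolution.
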
2420 HNewArray* HInstructionBuilder::BuildNewArray(uint32_t dex_pc,
2421 dex::TypeIndex type_index,
2422 HInstruction* length) {
2423 HLoadClass* cls = BuildLoadClass(type_index, dex_pc);
2424
2425 const char* descriptor = dex_file_->GetTypeDescriptor(dex_file_->GetTypeId(type_index));
2426 DCHECK_EQ(descriptor[0], '[');
2427 size_t component_type_shift = Primitive::ComponentSizeShift(Primitive::GetType(descriptor[1]));
2428
2429 HNewArray* new_array = new (allocator_) HNewArray(cls, length, dex_pc, component_type_shift);
2430 AppendInstruction(new_array);
2431 return new_array;
2432 }
2433
2434 HNewArray* HInstructionBuilder::BuildFilledNewArray(uint32_t dex_pc,
2435 dex::TypeIndex type_index,
2436 const InstructionOperands& operands) {
2437 const size_t number_of_operands = operands.GetNumberOfOperands();
2438 HInstruction* length = graph_->GetIntConstant(number_of_operands, dex_pc);
2439
2440 HNewArray* new_array = BuildNewArray(dex_pc, type_index, length);
2441 const char* descriptor = dex_file_->GetTypeDescriptor(type_index);
2442 DCHECK_EQ(descriptor[0], '[') << descriptor;
2443 char primitive = descriptor[1];
2444 DCHECK(primitive == 'I'
2445 || primitive == 'L'
2446 || primitive == '[') << descriptor;
2447 bool is_reference_array = (primitive == 'L') || (primitive == '[');
2448 DataType::Type type = is_reference_array ? DataType::Type::kReference : DataType::Type::kInt32;
2449
2450 for (size_t i = 0; i < number_of_operands; ++i) {
2451 HInstruction* value = LoadLocal(operands.GetOperand(i), type);
2452 HInstruction* index = graph_->GetIntConstant(i, dex_pc);
2453 HArraySet* aset = new (allocator_) HArraySet(new_array, index, value, type, dex_pc);
2454 ssa_builder_->MaybeAddAmbiguousArraySet(aset);
2455 AppendInstruction(aset);
2456 }
2457 latest_result_ = new_array;
2458
2459 return new_array;
2460 }
2461
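// Illustration (not from the original source): for dex code like
//
//   filled-new-array {v0, v1, v2}, [I
//   move-result-object v3
//
// the code above emits HNewArray with length 3 followed by three HArraySet instructions for
// indices 0..2, and `latest_result_` carries the array for the following move-result-object.
// Only int, reference and array element types can appear here, as the DCHECK asserts.
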
2462 template <typename T>
2463 void HInstructionBuilder::BuildFillArrayData(HInstruction* object,
2464 const T* data,
2465 uint32_t element_count,
2466 DataType::Type anticipated_type,
2467 uint32_t dex_pc) {
2468 for (uint32_t i = 0; i < element_count; ++i) {
2469 HInstruction* index = graph_->GetIntConstant(i, dex_pc);
2470 HInstruction* value = graph_->GetIntConstant(data[i], dex_pc);
2471 HArraySet* aset = new (allocator_) HArraySet(object, index, value, anticipated_type, dex_pc);
2472 ssa_builder_->MaybeAddAmbiguousArraySet(aset);
2473 AppendInstruction(aset);
2474 }
2475 }
2476
2477 void HInstructionBuilder::BuildFillArrayData(const Instruction& instruction, uint32_t dex_pc) {
2478 HInstruction* array = LoadNullCheckedLocal(instruction.VRegA_31t(), dex_pc);
2479
2480 int32_t payload_offset = instruction.VRegB_31t() + dex_pc;
2481 const Instruction::ArrayDataPayload* payload =
2482 reinterpret_cast<const Instruction::ArrayDataPayload*>(
2483 code_item_accessor_.Insns() + payload_offset);
2484 const uint8_t* data = payload->data;
2485 uint32_t element_count = payload->element_count;
2486
2487 if (element_count == 0u) {
2488 // For empty payload we emit only the null check above.
2489 return;
2490 }
2491
2492 HInstruction* length = new (allocator_) HArrayLength(array, dex_pc);
2493 AppendInstruction(length);
2494
2495 // The implementation of this DEX instruction appears to perform the bounds check
2496 // before doing any of the stores.
2497 HInstruction* last_index = graph_->GetIntConstant(payload->element_count - 1, dex_pc);
2498 AppendInstruction(new (allocator_) HBoundsCheck(last_index, length, dex_pc));
2499
2500 switch (payload->element_width) {
2501 case 1:
2502 BuildFillArrayData(array,
2503 reinterpret_cast<const int8_t*>(data),
2504 element_count,
2505 DataType::Type::kInt8,
2506 dex_pc);
2507 break;
2508 case 2:
2509 BuildFillArrayData(array,
2510 reinterpret_cast<const int16_t*>(data),
2511 element_count,
2512 DataType::Type::kInt16,
2513 dex_pc);
2514 break;
2515 case 4:
2516 BuildFillArrayData(array,
2517 reinterpret_cast<const int32_t*>(data),
2518 element_count,
2519 DataType::Type::kInt32,
2520 dex_pc);
2521 break;
2522 case 8:
2523 BuildFillWideArrayData(array,
2524 reinterpret_cast<const int64_t*>(data),
2525 element_count,
2526 dex_pc);
2527 break;
2528 default:
2529 LOG(FATAL) << "Unknown element width for " << payload->element_width;
2530 }
2531 graph_->SetHasBoundsChecks(true);
2532 }
2533
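// Illustration (not from the original source): an array initializer such as
//
//   int[] a = {10, 20, 30};
//
// is typically compiled to new-array plus fill-array-data referencing a payload with
// element_width == 4 and element_count == 3. The code above expands the payload into a
// single HBoundsCheck on the last index followed by constant HArraySet stores, so no
// runtime loop is needed.
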
2534 void HInstructionBuilder::BuildFillWideArrayData(HInstruction* object,
2535 const int64_t* data,
2536 uint32_t element_count,
2537 uint32_t dex_pc) {
2538 for (uint32_t i = 0; i < element_count; ++i) {
2539 HInstruction* index = graph_->GetIntConstant(i, dex_pc);
2540 HInstruction* value = graph_->GetLongConstant(data[i], dex_pc);
2541 HArraySet* aset =
2542 new (allocator_) HArraySet(object, index, value, DataType::Type::kInt64, dex_pc);
2543 ssa_builder_->MaybeAddAmbiguousArraySet(aset);
2544 AppendInstruction(aset);
2545 }
2546 }
2547
2548 void HInstructionBuilder::BuildLoadString(dex::StringIndex string_index, uint32_t dex_pc) {
2549 HLoadString* load_string =
2550 new (allocator_) HLoadString(graph_->GetCurrentMethod(), string_index, *dex_file_, dex_pc);
2551 HSharpening::ProcessLoadString(load_string,
2552 code_generator_,
2553 *dex_compilation_unit_,
2554 graph_->GetHandleCache()->GetHandles());
2555 AppendInstruction(load_string);
2556 }
2557
2558 HLoadClass* HInstructionBuilder::BuildLoadClass(dex::TypeIndex type_index, uint32_t dex_pc) {
2559 ScopedObjectAccess soa(Thread::Current());
2560 const DexFile& dex_file = *dex_compilation_unit_->GetDexFile();
2561 Handle<mirror::Class> klass = ResolveClass(soa, type_index);
2562 bool needs_access_check = LoadClassNeedsAccessCheck(type_index, klass.Get());
2563 return BuildLoadClass(type_index, dex_file, klass, dex_pc, needs_access_check);
2564 }
2565
2566 HLoadClass* HInstructionBuilder::BuildLoadClass(dex::TypeIndex type_index,
2567 const DexFile& dex_file,
2568 Handle<mirror::Class> klass,
2569 uint32_t dex_pc,
2570 bool needs_access_check) {
2571 // Try to find a reference in the compiling dex file.
2572 const DexFile* actual_dex_file = &dex_file;
2573 if (!IsSameDexFile(dex_file, *dex_compilation_unit_->GetDexFile())) {
2574 dex::TypeIndex local_type_index =
2575 klass->FindTypeIndexInOtherDexFile(*dex_compilation_unit_->GetDexFile());
2576 if (local_type_index.IsValid()) {
2577 type_index = local_type_index;
2578 actual_dex_file = dex_compilation_unit_->GetDexFile();
2579 }
2580 }
2581
2582 // We cannot use the referrer's class load kind if we need to do an access check.
2583 // If the `klass` is unresolved, we need access check with the exception of the referrer's
2584 // class, see LoadClassNeedsAccessCheck(), so the `!needs_access_check` check is enough.
2585 // Otherwise, also check if the `klass` is the same as the compiling class, which also
2586 // conveniently rejects the case of unresolved compiling class.
2587 bool is_referrers_class =
2588 !needs_access_check &&
2589 (klass == nullptr || outer_compilation_unit_->GetCompilingClass().Get() == klass.Get());
2590 // Note: `klass` must be from `graph_->GetHandleCache()`.
2591 HLoadClass* load_class = new (allocator_) HLoadClass(
2592 graph_->GetCurrentMethod(),
2593 type_index,
2594 *actual_dex_file,
2595 klass,
2596 is_referrers_class,
2597 dex_pc,
2598 needs_access_check);
2599
2600 HLoadClass::LoadKind load_kind = HSharpening::ComputeLoadClassKind(load_class,
2601 code_generator_,
2602 *dex_compilation_unit_);
2603
2604 if (load_kind == HLoadClass::LoadKind::kInvalid) {
2605 // We actually cannot reference this class; we're forced to bail.
2606 return nullptr;
2607 }
2608 // Load kind must be set before inserting the instruction into the graph.
2609 load_class->SetLoadKind(load_kind);
2610 AppendInstruction(load_class);
2611 return load_class;
2612 }
2613
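// Resolves `type_index` through the class linker, caching the result (including failures,
// as null handles) per type index so repeated references in the same method resolve only once.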
2614 Handle<mirror::Class> HInstructionBuilder::ResolveClass(ScopedObjectAccess& soa,
2615 dex::TypeIndex type_index) {
2616 auto it = class_cache_.find(type_index);
2617 if (it != class_cache_.end()) {
2618 return it->second;
2619 }
2620
2621 ObjPtr<mirror::Class> klass = dex_compilation_unit_->GetClassLinker()->ResolveType(
2622 type_index, dex_compilation_unit_->GetDexCache(), dex_compilation_unit_->GetClassLoader());
2623 DCHECK_EQ(klass == nullptr, soa.Self()->IsExceptionPending());
2624 soa.Self()->ClearException(); // Clean up the exception left by type resolution if any.
2625
2626 Handle<mirror::Class> h_klass = graph_->GetHandleCache()->NewHandle(klass);
2627 class_cache_.Put(type_index, h_klass);
2628 return h_klass;
2629 }
2630
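// Returns true if referencing `klass` from the compiling context requires a runtime access
// check. For an unresolved `klass`, only a reference to the compiling class itself (matched
// by descriptor and class loader) can skip the check; resolved classes are checked via their
// access flags and package.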
2631 bool HInstructionBuilder::LoadClassNeedsAccessCheck(dex::TypeIndex type_index,
2632 ObjPtr<mirror::Class> klass) {
2633 if (klass == nullptr) {
2634 // If the class is unresolved, we can avoid access checks only for references to
2635 // the compiling class as determined by checking the descriptor and ClassLoader.
2636 if (outer_compilation_unit_->GetCompilingClass() != nullptr) {
2637 // Compiling class is resolved, so different from the unresolved class.
2638 return true;
2639 }
2640 if (dex_compilation_unit_->GetClassLoader().Get() !=
2641 outer_compilation_unit_->GetClassLoader().Get()) {
2642 // Resolving the same descriptor in a different ClassLoader than the
2643 // defining loader of the compiling class shall either fail to find
2644 // the class definition, or find a different one.
2645 // (Assuming no custom ClassLoader hierarchy with circular delegation.)
2646 return true;
2647 }
2648 // Check if the class is the outer method's class.
2649 // For the same dex file, compare type indexes; otherwise, compare descriptors.
2650 const DexFile* outer_dex_file = outer_compilation_unit_->GetDexFile();
2651 const DexFile* inner_dex_file = dex_compilation_unit_->GetDexFile();
2652 const dex::ClassDef& outer_class_def =
2653 outer_dex_file->GetClassDef(outer_compilation_unit_->GetClassDefIndex());
2654 if (IsSameDexFile(*inner_dex_file, *outer_dex_file)) {
2655 if (type_index != outer_class_def.class_idx_) {
2656 return true;
2657 }
2658 } else {
2659 const std::string_view outer_descriptor =
2660 outer_dex_file->GetTypeDescriptorView(outer_class_def.class_idx_);
2661 const std::string_view target_descriptor =
2662 inner_dex_file->GetTypeDescriptorView(type_index);
2663 if (outer_descriptor != target_descriptor) {
2664 return true;
2665 }
2666 }
2667 // For inlined methods we also need to check if the compiling class
2668 // is public or in the same package as the inlined method's class.
2669 if (dex_compilation_unit_ != outer_compilation_unit_ &&
2670 (outer_class_def.access_flags_ & kAccPublic) == 0) {
2671 DCHECK(dex_compilation_unit_->GetCompilingClass() != nullptr);
2672 SamePackageCompare same_package(*outer_compilation_unit_);
2673 if (!same_package(dex_compilation_unit_->GetCompilingClass().Get())) {
2674 return true;
2675 }
2676 }
2677 return false;
2678 } else if (klass->IsPublic()) {
2679 return false;
2680 } else if (dex_compilation_unit_->GetCompilingClass() != nullptr) {
2681 return !dex_compilation_unit_->GetCompilingClass()->CanAccess(klass);
2682 } else {
2683 SamePackageCompare same_package(*dex_compilation_unit_);
2684 return !same_package(klass);
2685 }
2686 }
2687
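// Emits an HLoadMethodHandle for a const-method-handle instruction.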
2688 void HInstructionBuilder::BuildLoadMethodHandle(uint16_t method_handle_index, uint32_t dex_pc) {
2689 const DexFile& dex_file = *dex_compilation_unit_->GetDexFile();
2690 HLoadMethodHandle* load_method_handle = new (allocator_) HLoadMethodHandle(
2691 graph_->GetCurrentMethod(), method_handle_index, dex_file, dex_pc);
2692 AppendInstruction(load_method_handle);
2693 }
2694
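// Emits an HLoadMethodType for a const-method-type instruction. Outside of JIT compilation
// the MethodType is loaded through a .bss entry.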
2695 void HInstructionBuilder::BuildLoadMethodType(dex::ProtoIndex proto_index, uint32_t dex_pc) {
2696 const DexFile& dex_file = *dex_compilation_unit_->GetDexFile();
2697 HLoadMethodType* load_method_type =
2698 new (allocator_) HLoadMethodType(graph_->GetCurrentMethod(), proto_index, dex_file, dex_pc);
2699 if (!code_generator_->GetCompilerOptions().IsJitCompiler()) {
2700 load_method_type->SetLoadKind(HLoadMethodType::LoadKind::kBssEntry);
2701 }
2702 AppendInstruction(load_method_type);
2703 }
2704
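// Shared helper for instance-of and check-cast. When the sharpened check kind is a bitstring
// check, the encoded path-to-root and mask are materialized as constants instead of loading
// the class.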
2705 void HInstructionBuilder::BuildTypeCheck(bool is_instance_of,
2706 HInstruction* object,
2707 dex::TypeIndex type_index,
2708 uint32_t dex_pc) {
2709 ScopedObjectAccess soa(Thread::Current());
2710 const DexFile& dex_file = *dex_compilation_unit_->GetDexFile();
2711 Handle<mirror::Class> klass = ResolveClass(soa, type_index);
2712 bool needs_access_check = LoadClassNeedsAccessCheck(type_index, klass.Get());
2713 TypeCheckKind check_kind = HSharpening::ComputeTypeCheckKind(
2714 klass.Get(), code_generator_, needs_access_check);
2715
2716 HInstruction* class_or_null = nullptr;
2717 HIntConstant* bitstring_path_to_root = nullptr;
2718 HIntConstant* bitstring_mask = nullptr;
2719 if (check_kind == TypeCheckKind::kBitstringCheck) {
2720 // TODO: Allow using the bitstring check also if we need an access check.
2721 DCHECK(!needs_access_check);
2722 class_or_null = graph_->GetNullConstant(dex_pc);
2723 MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
2724 uint32_t path_to_root =
2725 SubtypeCheck<ObjPtr<mirror::Class>>::GetEncodedPathToRootForTarget(klass.Get());
2726 uint32_t mask = SubtypeCheck<ObjPtr<mirror::Class>>::GetEncodedPathToRootMask(klass.Get());
2727 bitstring_path_to_root = graph_->GetIntConstant(static_cast<int32_t>(path_to_root), dex_pc);
2728 bitstring_mask = graph_->GetIntConstant(static_cast<int32_t>(mask), dex_pc);
2729 } else {
2730 class_or_null = BuildLoadClass(type_index, dex_file, klass, dex_pc, needs_access_check);
2731 }
2732 DCHECK(class_or_null != nullptr);
2733
2734 if (is_instance_of) {
2735 AppendInstruction(new (allocator_) HInstanceOf(object,
2736 class_or_null,
2737 check_kind,
2738 klass,
2739 dex_pc,
2740 allocator_,
2741 bitstring_path_to_root,
2742 bitstring_mask));
2743 } else {
2744 // We emit a CheckCast followed by a BoundType. CheckCast is a statement
2745 // which may throw. If it succeeds, BoundType sets the new type of `object`
2746 // for all subsequent uses.
2747 AppendInstruction(
2748 new (allocator_) HCheckCast(object,
2749 class_or_null,
2750 check_kind,
2751 klass,
2752 dex_pc,
2753 allocator_,
2754 bitstring_path_to_root,
2755 bitstring_mask));
2756 AppendInstruction(new (allocator_) HBoundType(object, dex_pc));
2757 }
2758 }
2759
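// Dex-level entry point: instance-of writes the boolean result to the destination register,
// while check-cast rebinds the checked reference to the HBoundType emitted by the helper above.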
2760 void HInstructionBuilder::BuildTypeCheck(const Instruction& instruction,
2761 uint8_t destination,
2762 uint8_t reference,
2763 dex::TypeIndex type_index,
2764 uint32_t dex_pc) {
2765 HInstruction* object = LoadLocal(reference, DataType::Type::kReference);
2766 bool is_instance_of = instruction.Opcode() == Instruction::INSTANCE_OF;
2767
2768 BuildTypeCheck(is_instance_of, object, type_index, dex_pc);
2769
2770 if (is_instance_of) {
2771 UpdateLocal(destination, current_block_->GetLastInstruction());
2772 } else {
2773 DCHECK_EQ(instruction.Opcode(), Instruction::CHECK_CAST);
2774 UpdateLocal(reference, current_block_->GetLastInstruction());
2775 }
2776 }
2777
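// Translates one dex instruction into HIR appended to the current block, e.g. a const-4
// records an HIntConstant in the destination register and an add-int becomes an HAdd.
// Returns false to abort compilation of the method, e.g. for failed invokes, failed instance
// field accesses, or unhandled opcodes.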
2778 bool HInstructionBuilder::ProcessDexInstruction(const Instruction& instruction, uint32_t dex_pc) {
2779 switch (instruction.Opcode()) {
2780 case Instruction::CONST_4: {
2781 int32_t register_index = instruction.VRegA();
2782 HIntConstant* constant = graph_->GetIntConstant(instruction.VRegB_11n(), dex_pc);
2783 UpdateLocal(register_index, constant);
2784 break;
2785 }
2786
2787 case Instruction::CONST_16: {
2788 int32_t register_index = instruction.VRegA();
2789 HIntConstant* constant = graph_->GetIntConstant(instruction.VRegB_21s(), dex_pc);
2790 UpdateLocal(register_index, constant);
2791 break;
2792 }
2793
2794 case Instruction::CONST: {
2795 int32_t register_index = instruction.VRegA();
2796 HIntConstant* constant = graph_->GetIntConstant(instruction.VRegB_31i(), dex_pc);
2797 UpdateLocal(register_index, constant);
2798 break;
2799 }
2800
2801 case Instruction::CONST_HIGH16: {
2802 int32_t register_index = instruction.VRegA();
2803 HIntConstant* constant = graph_->GetIntConstant(instruction.VRegB_21h() << 16, dex_pc);
2804 UpdateLocal(register_index, constant);
2805 break;
2806 }
2807
2808 case Instruction::CONST_WIDE_16: {
2809 int32_t register_index = instruction.VRegA();
2810 // Get 16 bits of constant value, sign extended to 64 bits.
2811 int64_t value = instruction.VRegB_21s();
2812 value <<= 48;
2813 value >>= 48;
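// (Shifting left and then right by 48 sign-extends the low 16 bits into the 64-bit value.)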
2814 HLongConstant* constant = graph_->GetLongConstant(value, dex_pc);
2815 UpdateLocal(register_index, constant);
2816 break;
2817 }
2818
2819 case Instruction::CONST_WIDE_32: {
2820 int32_t register_index = instruction.VRegA();
2821 // Get 32 bits of constant value, sign extended to 64 bits.
2822 int64_t value = instruction.VRegB_31i();
2823 value <<= 32;
2824 value >>= 32;
2825 HLongConstant* constant = graph_->GetLongConstant(value, dex_pc);
2826 UpdateLocal(register_index, constant);
2827 break;
2828 }
2829
2830 case Instruction::CONST_WIDE: {
2831 int32_t register_index = instruction.VRegA();
2832 HLongConstant* constant = graph_->GetLongConstant(instruction.VRegB_51l(), dex_pc);
2833 UpdateLocal(register_index, constant);
2834 break;
2835 }
2836
2837 case Instruction::CONST_WIDE_HIGH16: {
2838 int32_t register_index = instruction.VRegA();
2839 int64_t value = static_cast<int64_t>(instruction.VRegB_21h()) << 48;
2840 HLongConstant* constant = graph_->GetLongConstant(value, dex_pc);
2841 UpdateLocal(register_index, constant);
2842 break;
2843 }
2844
2845 // Note that the SSA building will refine the types.
2846 case Instruction::MOVE:
2847 case Instruction::MOVE_FROM16:
2848 case Instruction::MOVE_16: {
2849 HInstruction* value = LoadLocal(instruction.VRegB(), DataType::Type::kInt32);
2850 UpdateLocal(instruction.VRegA(), value);
2851 break;
2852 }
2853
2854 // Note that the SSA building will refine the types.
2855 case Instruction::MOVE_WIDE:
2856 case Instruction::MOVE_WIDE_FROM16:
2857 case Instruction::MOVE_WIDE_16: {
2858 HInstruction* value = LoadLocal(instruction.VRegB(), DataType::Type::kInt64);
2859 UpdateLocal(instruction.VRegA(), value);
2860 break;
2861 }
2862
2863 case Instruction::MOVE_OBJECT:
2864 case Instruction::MOVE_OBJECT_16:
2865 case Instruction::MOVE_OBJECT_FROM16: {
2866 // The verifier has no notion of a null type, so a move-object of constant 0
2867 // will lead to the same constant 0 in the destination register. To mimic
2868 // this behavior, we just pretend we haven't seen a type change (int to reference)
2869 // for the 0 constant and phis. We rely on our type propagation to eventually get the
2870 // types correct.
2871 uint32_t reg_number = instruction.VRegB();
2872 HInstruction* value = (*current_locals_)[reg_number];
2873 if (value->IsIntConstant()) {
2874 DCHECK_EQ(value->AsIntConstant()->GetValue(), 0);
2875 } else if (value->IsPhi()) {
2876 DCHECK(value->GetType() == DataType::Type::kInt32 ||
2877 value->GetType() == DataType::Type::kReference);
2878 } else {
2879 value = LoadLocal(reg_number, DataType::Type::kReference);
2880 }
2881 UpdateLocal(instruction.VRegA(), value);
2882 break;
2883 }
2884
2885 case Instruction::RETURN_VOID: {
2886 BuildReturn(instruction, DataType::Type::kVoid, dex_pc);
2887 break;
2888 }
2889
2890 #define IF_XX(comparison, cond) \
2891 case Instruction::IF_##cond: \
2892 If_21_22t<comparison, /* kCompareWithZero= */ false>(instruction, dex_pc); \
2893 break; \
2894 case Instruction::IF_##cond##Z: \
2895 If_21_22t<comparison, /* kCompareWithZero= */ true>(instruction, dex_pc); \
2896 break;
2897
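// Each IF_XX expansion handles both the two-register form (if-<cond> vA, vB) and the
// zero-compare form (if-<cond>z vA).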
2898 IF_XX(HEqual, EQ);
2899 IF_XX(HNotEqual, NE);
2900 IF_XX(HLessThan, LT);
2901 IF_XX(HLessThanOrEqual, LE);
2902 IF_XX(HGreaterThan, GT);
2903 IF_XX(HGreaterThanOrEqual, GE);
2904
2905 case Instruction::GOTO:
2906 case Instruction::GOTO_16:
2907 case Instruction::GOTO_32: {
2908 AppendInstruction(new (allocator_) HGoto(dex_pc));
2909 current_block_ = nullptr;
2910 break;
2911 }
2912
2913 case Instruction::RETURN: {
2914 BuildReturn(instruction, return_type_, dex_pc);
2915 break;
2916 }
2917
2918 case Instruction::RETURN_OBJECT: {
2919 BuildReturn(instruction, return_type_, dex_pc);
2920 break;
2921 }
2922
2923 case Instruction::RETURN_WIDE: {
2924 BuildReturn(instruction, return_type_, dex_pc);
2925 break;
2926 }
2927
2928 case Instruction::INVOKE_DIRECT:
2929 case Instruction::INVOKE_INTERFACE:
2930 case Instruction::INVOKE_STATIC:
2931 case Instruction::INVOKE_SUPER:
2932 case Instruction::INVOKE_VIRTUAL: {
2933 uint16_t method_idx = instruction.VRegB_35c();
2934 uint32_t args[5];
2935 uint32_t number_of_vreg_arguments = instruction.GetVarArgs(args);
2936 VarArgsInstructionOperands operands(args, number_of_vreg_arguments);
2937 if (!BuildInvoke(instruction, dex_pc, method_idx, operands)) {
2938 return false;
2939 }
2940 break;
2941 }
2942
2943 case Instruction::INVOKE_DIRECT_RANGE:
2944 case Instruction::INVOKE_INTERFACE_RANGE:
2945 case Instruction::INVOKE_STATIC_RANGE:
2946 case Instruction::INVOKE_SUPER_RANGE:
2947 case Instruction::INVOKE_VIRTUAL_RANGE: {
2948 uint16_t method_idx = instruction.VRegB_3rc();
2949 RangeInstructionOperands operands(instruction.VRegC(), instruction.VRegA_3rc());
2950 if (!BuildInvoke(instruction, dex_pc, method_idx, operands)) {
2951 return false;
2952 }
2953 break;
2954 }
2955
2956 case Instruction::INVOKE_POLYMORPHIC: {
2957 uint16_t method_idx = instruction.VRegB_45cc();
2958 dex::ProtoIndex proto_idx(instruction.VRegH_45cc());
2959 uint32_t args[5];
2960 uint32_t number_of_vreg_arguments = instruction.GetVarArgs(args);
2961 VarArgsInstructionOperands operands(args, number_of_vreg_arguments);
2962 return BuildInvokePolymorphic(dex_pc, method_idx, proto_idx, operands);
2963 }
2964
2965 case Instruction::INVOKE_POLYMORPHIC_RANGE: {
2966 uint16_t method_idx = instruction.VRegB_4rcc();
2967 dex::ProtoIndex proto_idx(instruction.VRegH_4rcc());
2968 RangeInstructionOperands operands(instruction.VRegC_4rcc(), instruction.VRegA_4rcc());
2969 return BuildInvokePolymorphic(dex_pc, method_idx, proto_idx, operands);
2970 }
2971
2972 case Instruction::INVOKE_CUSTOM: {
2973 uint16_t call_site_idx = instruction.VRegB_35c();
2974 uint32_t args[5];
2975 uint32_t number_of_vreg_arguments = instruction.GetVarArgs(args);
2976 VarArgsInstructionOperands operands(args, number_of_vreg_arguments);
2977 return BuildInvokeCustom(dex_pc, call_site_idx, operands);
2978 }
2979
2980 case Instruction::INVOKE_CUSTOM_RANGE: {
2981 uint16_t call_site_idx = instruction.VRegB_3rc();
2982 RangeInstructionOperands operands(instruction.VRegC_3rc(), instruction.VRegA_3rc());
2983 return BuildInvokeCustom(dex_pc, call_site_idx, operands);
2984 }
2985
2986 case Instruction::NEG_INT: {
2987 Unop_12x<HNeg>(instruction, DataType::Type::kInt32, dex_pc);
2988 break;
2989 }
2990
2991 case Instruction::NEG_LONG: {
2992 Unop_12x<HNeg>(instruction, DataType::Type::kInt64, dex_pc);
2993 break;
2994 }
2995
2996 case Instruction::NEG_FLOAT: {
2997 Unop_12x<HNeg>(instruction, DataType::Type::kFloat32, dex_pc);
2998 break;
2999 }
3000
3001 case Instruction::NEG_DOUBLE: {
3002 Unop_12x<HNeg>(instruction, DataType::Type::kFloat64, dex_pc);
3003 break;
3004 }
3005
3006 case Instruction::NOT_INT: {
3007 Unop_12x<HNot>(instruction, DataType::Type::kInt32, dex_pc);
3008 break;
3009 }
3010
3011 case Instruction::NOT_LONG: {
3012 Unop_12x<HNot>(instruction, DataType::Type::kInt64, dex_pc);
3013 break;
3014 }
3015
3016 case Instruction::INT_TO_LONG: {
3017 Conversion_12x(instruction, DataType::Type::kInt32, DataType::Type::kInt64, dex_pc);
3018 break;
3019 }
3020
3021 case Instruction::INT_TO_FLOAT: {
3022 Conversion_12x(instruction, DataType::Type::kInt32, DataType::Type::kFloat32, dex_pc);
3023 break;
3024 }
3025
3026 case Instruction::INT_TO_DOUBLE: {
3027 Conversion_12x(instruction, DataType::Type::kInt32, DataType::Type::kFloat64, dex_pc);
3028 break;
3029 }
3030
3031 case Instruction::LONG_TO_INT: {
3032 Conversion_12x(instruction, DataType::Type::kInt64, DataType::Type::kInt32, dex_pc);
3033 break;
3034 }
3035
3036 case Instruction::LONG_TO_FLOAT: {
3037 Conversion_12x(instruction, DataType::Type::kInt64, DataType::Type::kFloat32, dex_pc);
3038 break;
3039 }
3040
3041 case Instruction::LONG_TO_DOUBLE: {
3042 Conversion_12x(instruction, DataType::Type::kInt64, DataType::Type::kFloat64, dex_pc);
3043 break;
3044 }
3045
3046 case Instruction::FLOAT_TO_INT: {
3047 Conversion_12x(instruction, DataType::Type::kFloat32, DataType::Type::kInt32, dex_pc);
3048 break;
3049 }
3050
3051 case Instruction::FLOAT_TO_LONG: {
3052 Conversion_12x(instruction, DataType::Type::kFloat32, DataType::Type::kInt64, dex_pc);
3053 break;
3054 }
3055
3056 case Instruction::FLOAT_TO_DOUBLE: {
3057 Conversion_12x(instruction, DataType::Type::kFloat32, DataType::Type::kFloat64, dex_pc);
3058 break;
3059 }
3060
3061 case Instruction::DOUBLE_TO_INT: {
3062 Conversion_12x(instruction, DataType::Type::kFloat64, DataType::Type::kInt32, dex_pc);
3063 break;
3064 }
3065
3066 case Instruction::DOUBLE_TO_LONG: {
3067 Conversion_12x(instruction, DataType::Type::kFloat64, DataType::Type::kInt64, dex_pc);
3068 break;
3069 }
3070
3071 case Instruction::DOUBLE_TO_FLOAT: {
3072 Conversion_12x(instruction, DataType::Type::kFloat64, DataType::Type::kFloat32, dex_pc);
3073 break;
3074 }
3075
3076 case Instruction::INT_TO_BYTE: {
3077 Conversion_12x(instruction, DataType::Type::kInt32, DataType::Type::kInt8, dex_pc);
3078 break;
3079 }
3080
3081 case Instruction::INT_TO_SHORT: {
3082 Conversion_12x(instruction, DataType::Type::kInt32, DataType::Type::kInt16, dex_pc);
3083 break;
3084 }
3085
3086 case Instruction::INT_TO_CHAR: {
3087 Conversion_12x(instruction, DataType::Type::kInt32, DataType::Type::kUint16, dex_pc);
3088 break;
3089 }
3090
3091 case Instruction::ADD_INT: {
3092 Binop_23x<HAdd>(instruction, DataType::Type::kInt32, dex_pc);
3093 break;
3094 }
3095
3096 case Instruction::ADD_LONG: {
3097 Binop_23x<HAdd>(instruction, DataType::Type::kInt64, dex_pc);
3098 break;
3099 }
3100
3101 case Instruction::ADD_DOUBLE: {
3102 Binop_23x<HAdd>(instruction, DataType::Type::kFloat64, dex_pc);
3103 break;
3104 }
3105
3106 case Instruction::ADD_FLOAT: {
3107 Binop_23x<HAdd>(instruction, DataType::Type::kFloat32, dex_pc);
3108 break;
3109 }
3110
3111 case Instruction::SUB_INT: {
3112 Binop_23x<HSub>(instruction, DataType::Type::kInt32, dex_pc);
3113 break;
3114 }
3115
3116 case Instruction::SUB_LONG: {
3117 Binop_23x<HSub>(instruction, DataType::Type::kInt64, dex_pc);
3118 break;
3119 }
3120
3121 case Instruction::SUB_FLOAT: {
3122 Binop_23x<HSub>(instruction, DataType::Type::kFloat32, dex_pc);
3123 break;
3124 }
3125
3126 case Instruction::SUB_DOUBLE: {
3127 Binop_23x<HSub>(instruction, DataType::Type::kFloat64, dex_pc);
3128 break;
3129 }
3130
3131 case Instruction::ADD_INT_2ADDR: {
3132 Binop_12x<HAdd>(instruction, DataType::Type::kInt32, dex_pc);
3133 break;
3134 }
3135
3136 case Instruction::MUL_INT: {
3137 Binop_23x<HMul>(instruction, DataType::Type::kInt32, dex_pc);
3138 break;
3139 }
3140
3141 case Instruction::MUL_LONG: {
3142 Binop_23x<HMul>(instruction, DataType::Type::kInt64, dex_pc);
3143 break;
3144 }
3145
3146 case Instruction::MUL_FLOAT: {
3147 Binop_23x<HMul>(instruction, DataType::Type::kFloat32, dex_pc);
3148 break;
3149 }
3150
3151 case Instruction::MUL_DOUBLE: {
3152 Binop_23x<HMul>(instruction, DataType::Type::kFloat64, dex_pc);
3153 break;
3154 }
3155
3156 case Instruction::DIV_INT: {
3157 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegB(), instruction.VRegC(),
3158 dex_pc, DataType::Type::kInt32, false, true);
3159 break;
3160 }
3161
3162 case Instruction::DIV_LONG: {
3163 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegB(), instruction.VRegC(),
3164 dex_pc, DataType::Type::kInt64, false, true);
3165 break;
3166 }
3167
3168 case Instruction::DIV_FLOAT: {
3169 Binop_23x<HDiv>(instruction, DataType::Type::kFloat32, dex_pc);
3170 break;
3171 }
3172
3173 case Instruction::DIV_DOUBLE: {
3174 Binop_23x<HDiv>(instruction, DataType::Type::kFloat64, dex_pc);
3175 break;
3176 }
3177
3178 case Instruction::REM_INT: {
3179 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegB(), instruction.VRegC(),
3180 dex_pc, DataType::Type::kInt32, false, false);
3181 break;
3182 }
3183
3184 case Instruction::REM_LONG: {
3185 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegB(), instruction.VRegC(),
3186 dex_pc, DataType::Type::kInt64, false, false);
3187 break;
3188 }
3189
3190 case Instruction::REM_FLOAT: {
3191 Binop_23x<HRem>(instruction, DataType::Type::kFloat32, dex_pc);
3192 break;
3193 }
3194
3195 case Instruction::REM_DOUBLE: {
3196 Binop_23x<HRem>(instruction, DataType::Type::kFloat64, dex_pc);
3197 break;
3198 }
3199
3200 case Instruction::AND_INT: {
3201 Binop_23x<HAnd>(instruction, DataType::Type::kInt32, dex_pc);
3202 break;
3203 }
3204
3205 case Instruction::AND_LONG: {
3206 Binop_23x<HAnd>(instruction, DataType::Type::kInt64, dex_pc);
3207 break;
3208 }
3209
3210 case Instruction::SHL_INT: {
3211 Binop_23x_shift<HShl>(instruction, DataType::Type::kInt32, dex_pc);
3212 break;
3213 }
3214
3215 case Instruction::SHL_LONG: {
3216 Binop_23x_shift<HShl>(instruction, DataType::Type::kInt64, dex_pc);
3217 break;
3218 }
3219
3220 case Instruction::SHR_INT: {
3221 Binop_23x_shift<HShr>(instruction, DataType::Type::kInt32, dex_pc);
3222 break;
3223 }
3224
3225 case Instruction::SHR_LONG: {
3226 Binop_23x_shift<HShr>(instruction, DataType::Type::kInt64, dex_pc);
3227 break;
3228 }
3229
3230 case Instruction::USHR_INT: {
3231 Binop_23x_shift<HUShr>(instruction, DataType::Type::kInt32, dex_pc);
3232 break;
3233 }
3234
3235 case Instruction::USHR_LONG: {
3236 Binop_23x_shift<HUShr>(instruction, DataType::Type::kInt64, dex_pc);
3237 break;
3238 }
3239
3240 case Instruction::OR_INT: {
3241 Binop_23x<HOr>(instruction, DataType::Type::kInt32, dex_pc);
3242 break;
3243 }
3244
3245 case Instruction::OR_LONG: {
3246 Binop_23x<HOr>(instruction, DataType::Type::kInt64, dex_pc);
3247 break;
3248 }
3249
3250 case Instruction::XOR_INT: {
3251 Binop_23x<HXor>(instruction, DataType::Type::kInt32, dex_pc);
3252 break;
3253 }
3254
3255 case Instruction::XOR_LONG: {
3256 Binop_23x<HXor>(instruction, DataType::Type::kInt64, dex_pc);
3257 break;
3258 }
3259
3260 case Instruction::ADD_LONG_2ADDR: {
3261 Binop_12x<HAdd>(instruction, DataType::Type::kInt64, dex_pc);
3262 break;
3263 }
3264
3265 case Instruction::ADD_DOUBLE_2ADDR: {
3266 Binop_12x<HAdd>(instruction, DataType::Type::kFloat64, dex_pc);
3267 break;
3268 }
3269
3270 case Instruction::ADD_FLOAT_2ADDR: {
3271 Binop_12x<HAdd>(instruction, DataType::Type::kFloat32, dex_pc);
3272 break;
3273 }
3274
3275 case Instruction::SUB_INT_2ADDR: {
3276 Binop_12x<HSub>(instruction, DataType::Type::kInt32, dex_pc);
3277 break;
3278 }
3279
3280 case Instruction::SUB_LONG_2ADDR: {
3281 Binop_12x<HSub>(instruction, DataType::Type::kInt64, dex_pc);
3282 break;
3283 }
3284
3285 case Instruction::SUB_FLOAT_2ADDR: {
3286 Binop_12x<HSub>(instruction, DataType::Type::kFloat32, dex_pc);
3287 break;
3288 }
3289
3290 case Instruction::SUB_DOUBLE_2ADDR: {
3291 Binop_12x<HSub>(instruction, DataType::Type::kFloat64, dex_pc);
3292 break;
3293 }
3294
3295 case Instruction::MUL_INT_2ADDR: {
3296 Binop_12x<HMul>(instruction, DataType::Type::kInt32, dex_pc);
3297 break;
3298 }
3299
3300 case Instruction::MUL_LONG_2ADDR: {
3301 Binop_12x<HMul>(instruction, DataType::Type::kInt64, dex_pc);
3302 break;
3303 }
3304
3305 case Instruction::MUL_FLOAT_2ADDR: {
3306 Binop_12x<HMul>(instruction, DataType::Type::kFloat32, dex_pc);
3307 break;
3308 }
3309
3310 case Instruction::MUL_DOUBLE_2ADDR: {
3311 Binop_12x<HMul>(instruction, DataType::Type::kFloat64, dex_pc);
3312 break;
3313 }
3314
3315 case Instruction::DIV_INT_2ADDR: {
3316 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegA(), instruction.VRegB(),
3317 dex_pc, DataType::Type::kInt32, false, true);
3318 break;
3319 }
3320
3321 case Instruction::DIV_LONG_2ADDR: {
3322 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegA(), instruction.VRegB(),
3323 dex_pc, DataType::Type::kInt64, false, true);
3324 break;
3325 }
3326
3327 case Instruction::REM_INT_2ADDR: {
3328 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegA(), instruction.VRegB(),
3329 dex_pc, DataType::Type::kInt32, false, false);
3330 break;
3331 }
3332
3333 case Instruction::REM_LONG_2ADDR: {
3334 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegA(), instruction.VRegB(),
3335 dex_pc, DataType::Type::kInt64, false, false);
3336 break;
3337 }
3338
3339 case Instruction::REM_FLOAT_2ADDR: {
3340 Binop_12x<HRem>(instruction, DataType::Type::kFloat32, dex_pc);
3341 break;
3342 }
3343
3344 case Instruction::REM_DOUBLE_2ADDR: {
3345 Binop_12x<HRem>(instruction, DataType::Type::kFloat64, dex_pc);
3346 break;
3347 }
3348
3349 case Instruction::SHL_INT_2ADDR: {
3350 Binop_12x_shift<HShl>(instruction, DataType::Type::kInt32, dex_pc);
3351 break;
3352 }
3353
3354 case Instruction::SHL_LONG_2ADDR: {
3355 Binop_12x_shift<HShl>(instruction, DataType::Type::kInt64, dex_pc);
3356 break;
3357 }
3358
3359 case Instruction::SHR_INT_2ADDR: {
3360 Binop_12x_shift<HShr>(instruction, DataType::Type::kInt32, dex_pc);
3361 break;
3362 }
3363
3364 case Instruction::SHR_LONG_2ADDR: {
3365 Binop_12x_shift<HShr>(instruction, DataType::Type::kInt64, dex_pc);
3366 break;
3367 }
3368
3369 case Instruction::USHR_INT_2ADDR: {
3370 Binop_12x_shift<HUShr>(instruction, DataType::Type::kInt32, dex_pc);
3371 break;
3372 }
3373
3374 case Instruction::USHR_LONG_2ADDR: {
3375 Binop_12x_shift<HUShr>(instruction, DataType::Type::kInt64, dex_pc);
3376 break;
3377 }
3378
3379 case Instruction::DIV_FLOAT_2ADDR: {
3380 Binop_12x<HDiv>(instruction, DataType::Type::kFloat32, dex_pc);
3381 break;
3382 }
3383
3384 case Instruction::DIV_DOUBLE_2ADDR: {
3385 Binop_12x<HDiv>(instruction, DataType::Type::kFloat64, dex_pc);
3386 break;
3387 }
3388
3389 case Instruction::AND_INT_2ADDR: {
3390 Binop_12x<HAnd>(instruction, DataType::Type::kInt32, dex_pc);
3391 break;
3392 }
3393
3394 case Instruction::AND_LONG_2ADDR: {
3395 Binop_12x<HAnd>(instruction, DataType::Type::kInt64, dex_pc);
3396 break;
3397 }
3398
3399 case Instruction::OR_INT_2ADDR: {
3400 Binop_12x<HOr>(instruction, DataType::Type::kInt32, dex_pc);
3401 break;
3402 }
3403
3404 case Instruction::OR_LONG_2ADDR: {
3405 Binop_12x<HOr>(instruction, DataType::Type::kInt64, dex_pc);
3406 break;
3407 }
3408
3409 case Instruction::XOR_INT_2ADDR: {
3410 Binop_12x<HXor>(instruction, DataType::Type::kInt32, dex_pc);
3411 break;
3412 }
3413
3414 case Instruction::XOR_LONG_2ADDR: {
3415 Binop_12x<HXor>(instruction, DataType::Type::kInt64, dex_pc);
3416 break;
3417 }
3418
3419 case Instruction::ADD_INT_LIT16: {
3420 Binop_22s<HAdd>(instruction, false, dex_pc);
3421 break;
3422 }
3423
3424 case Instruction::AND_INT_LIT16: {
3425 Binop_22s<HAnd>(instruction, false, dex_pc);
3426 break;
3427 }
3428
3429 case Instruction::OR_INT_LIT16: {
3430 Binop_22s<HOr>(instruction, false, dex_pc);
3431 break;
3432 }
3433
3434 case Instruction::XOR_INT_LIT16: {
3435 Binop_22s<HXor>(instruction, false, dex_pc);
3436 break;
3437 }
3438
3439 case Instruction::RSUB_INT: {
3440 Binop_22s<HSub>(instruction, true, dex_pc);
3441 break;
3442 }
3443
3444 case Instruction::MUL_INT_LIT16: {
3445 Binop_22s<HMul>(instruction, false, dex_pc);
3446 break;
3447 }
3448
3449 case Instruction::ADD_INT_LIT8: {
3450 Binop_22b<HAdd>(instruction, false, dex_pc);
3451 break;
3452 }
3453
3454 case Instruction::AND_INT_LIT8: {
3455 Binop_22b<HAnd>(instruction, false, dex_pc);
3456 break;
3457 }
3458
3459 case Instruction::OR_INT_LIT8: {
3460 Binop_22b<HOr>(instruction, false, dex_pc);
3461 break;
3462 }
3463
3464 case Instruction::XOR_INT_LIT8: {
3465 Binop_22b<HXor>(instruction, false, dex_pc);
3466 break;
3467 }
3468
3469 case Instruction::RSUB_INT_LIT8: {
3470 Binop_22b<HSub>(instruction, true, dex_pc);
3471 break;
3472 }
3473
3474 case Instruction::MUL_INT_LIT8: {
3475 Binop_22b<HMul>(instruction, false, dex_pc);
3476 break;
3477 }
3478
3479 case Instruction::DIV_INT_LIT16:
3480 case Instruction::DIV_INT_LIT8: {
3481 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegB(), instruction.VRegC(),
3482 dex_pc, DataType::Type::kInt32, true, true);
3483 break;
3484 }
3485
3486 case Instruction::REM_INT_LIT16:
3487 case Instruction::REM_INT_LIT8: {
3488 BuildCheckedDivRem(instruction.VRegA(), instruction.VRegB(), instruction.VRegC(),
3489 dex_pc, DataType::Type::kInt32, true, false);
3490 break;
3491 }
3492
3493 case Instruction::SHL_INT_LIT8: {
3494 Binop_22b<HShl>(instruction, false, dex_pc);
3495 break;
3496 }
3497
3498 case Instruction::SHR_INT_LIT8: {
3499 Binop_22b<HShr>(instruction, false, dex_pc);
3500 break;
3501 }
3502
3503 case Instruction::USHR_INT_LIT8: {
3504 Binop_22b<HUShr>(instruction, false, dex_pc);
3505 break;
3506 }
3507
3508 case Instruction::NEW_INSTANCE: {
3509 HNewInstance* new_instance =
3510 BuildNewInstance(dex::TypeIndex(instruction.VRegB_21c()), dex_pc);
3511 DCHECK(new_instance != nullptr);
3512
3513 UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction());
3514 BuildConstructorFenceForAllocation(new_instance);
3515 break;
3516 }
3517
3518 case Instruction::NEW_ARRAY: {
3519 dex::TypeIndex type_index(instruction.VRegC_22c());
3520 HInstruction* length = LoadLocal(instruction.VRegB_22c(), DataType::Type::kInt32);
3521 HNewArray* new_array = BuildNewArray(dex_pc, type_index, length);
3522
3523 UpdateLocal(instruction.VRegA_22c(), current_block_->GetLastInstruction());
3524 BuildConstructorFenceForAllocation(new_array);
3525 break;
3526 }
3527
3528 case Instruction::FILLED_NEW_ARRAY: {
3529 dex::TypeIndex type_index(instruction.VRegB_35c());
3530 uint32_t args[5];
3531 uint32_t number_of_vreg_arguments = instruction.GetVarArgs(args);
3532 VarArgsInstructionOperands operands(args, number_of_vreg_arguments);
3533 HNewArray* new_array = BuildFilledNewArray(dex_pc, type_index, operands);
3534 BuildConstructorFenceForAllocation(new_array);
3535 break;
3536 }
3537
3538 case Instruction::FILLED_NEW_ARRAY_RANGE: {
3539 dex::TypeIndex type_index(instruction.VRegB_3rc());
3540 RangeInstructionOperands operands(instruction.VRegC_3rc(), instruction.VRegA_3rc());
3541 HNewArray* new_array = BuildFilledNewArray(dex_pc, type_index, operands);
3542 BuildConstructorFenceForAllocation(new_array);
3543 break;
3544 }
3545
3546 case Instruction::FILL_ARRAY_DATA: {
3547 BuildFillArrayData(instruction, dex_pc);
3548 break;
3549 }
3550
3551 case Instruction::MOVE_RESULT:
3552 case Instruction::MOVE_RESULT_WIDE:
3553 case Instruction::MOVE_RESULT_OBJECT: {
3554 DCHECK(latest_result_ != nullptr);
3555 UpdateLocal(instruction.VRegA(), latest_result_);
3556 latest_result_ = nullptr;
3557 break;
3558 }
3559
3560 case Instruction::CMP_LONG: {
3561 Binop_23x_cmp(instruction, DataType::Type::kInt64, ComparisonBias::kNoBias, dex_pc);
3562 break;
3563 }
3564
3565 case Instruction::CMPG_FLOAT: {
3566 Binop_23x_cmp(instruction, DataType::Type::kFloat32, ComparisonBias::kGtBias, dex_pc);
3567 break;
3568 }
3569
3570 case Instruction::CMPG_DOUBLE: {
3571 Binop_23x_cmp(instruction, DataType::Type::kFloat64, ComparisonBias::kGtBias, dex_pc);
3572 break;
3573 }
3574
3575 case Instruction::CMPL_FLOAT: {
3576 Binop_23x_cmp(instruction, DataType::Type::kFloat32, ComparisonBias::kLtBias, dex_pc);
3577 break;
3578 }
3579
3580 case Instruction::CMPL_DOUBLE: {
3581 Binop_23x_cmp(instruction, DataType::Type::kFloat64, ComparisonBias::kLtBias, dex_pc);
3582 break;
3583 }
3584
3585 case Instruction::NOP:
3586 break;
3587
3588 case Instruction::IGET:
3589 case Instruction::IGET_WIDE:
3590 case Instruction::IGET_OBJECT:
3591 case Instruction::IGET_BOOLEAN:
3592 case Instruction::IGET_BYTE:
3593 case Instruction::IGET_CHAR:
3594 case Instruction::IGET_SHORT: {
3595 if (!BuildInstanceFieldAccess(instruction, dex_pc, /* is_put= */ false)) {
3596 return false;
3597 }
3598 break;
3599 }
3600
3601 case Instruction::IPUT:
3602 case Instruction::IPUT_WIDE:
3603 case Instruction::IPUT_OBJECT:
3604 case Instruction::IPUT_BOOLEAN:
3605 case Instruction::IPUT_BYTE:
3606 case Instruction::IPUT_CHAR:
3607 case Instruction::IPUT_SHORT: {
3608 if (!BuildInstanceFieldAccess(instruction, dex_pc, /* is_put= */ true)) {
3609 return false;
3610 }
3611 break;
3612 }
3613
3614 case Instruction::SGET:
3615 case Instruction::SGET_WIDE:
3616 case Instruction::SGET_OBJECT:
3617 case Instruction::SGET_BOOLEAN:
3618 case Instruction::SGET_BYTE:
3619 case Instruction::SGET_CHAR:
3620 case Instruction::SGET_SHORT: {
3621 BuildStaticFieldAccess(instruction, dex_pc, /* is_put= */ false);
3622 break;
3623 }
3624
3625 case Instruction::SPUT:
3626 case Instruction::SPUT_WIDE:
3627 case Instruction::SPUT_OBJECT:
3628 case Instruction::SPUT_BOOLEAN:
3629 case Instruction::SPUT_BYTE:
3630 case Instruction::SPUT_CHAR:
3631 case Instruction::SPUT_SHORT: {
3632 BuildStaticFieldAccess(instruction, dex_pc, /* is_put= */ true);
3633 break;
3634 }
3635
3636 #define ARRAY_XX(kind, anticipated_type) \
3637 case Instruction::AGET##kind: { \
3638 BuildArrayAccess(instruction, dex_pc, false, anticipated_type); \
3639 break; \
3640 } \
3641 case Instruction::APUT##kind: { \
3642 BuildArrayAccess(instruction, dex_pc, true, anticipated_type); \
3643 break; \
3644 }
3645
3646 ARRAY_XX(, DataType::Type::kInt32);
3647 ARRAY_XX(_WIDE, DataType::Type::kInt64);
3648 ARRAY_XX(_OBJECT, DataType::Type::kReference);
3649 ARRAY_XX(_BOOLEAN, DataType::Type::kBool);
3650 ARRAY_XX(_BYTE, DataType::Type::kInt8);
3651 ARRAY_XX(_CHAR, DataType::Type::kUint16);
3652 ARRAY_XX(_SHORT, DataType::Type::kInt16);
3653
3654 case Instruction::ARRAY_LENGTH: {
3655 HInstruction* object = LoadNullCheckedLocal(instruction.VRegB_12x(), dex_pc);
3656 AppendInstruction(new (allocator_) HArrayLength(object, dex_pc));
3657 UpdateLocal(instruction.VRegA_12x(), current_block_->GetLastInstruction());
3658 break;
3659 }
3660
3661 case Instruction::CONST_STRING: {
3662 dex::StringIndex string_index(instruction.VRegB_21c());
3663 BuildLoadString(string_index, dex_pc);
3664 UpdateLocal(instruction.VRegA_21c(), current_block_->GetLastInstruction());
3665 break;
3666 }
3667
3668 case Instruction::CONST_STRING_JUMBO: {
3669 dex::StringIndex string_index(instruction.VRegB_31c());
3670 BuildLoadString(string_index, dex_pc);
3671 UpdateLocal(instruction.VRegA_31c(), current_block_->GetLastInstruction());
3672 break;
3673 }
3674
3675 case Instruction::CONST_CLASS: {
3676 dex::TypeIndex type_index(instruction.VRegB_21c());
3677 BuildLoadClass(type_index, dex_pc);
3678 UpdateLocal(instruction.VRegA_21c(), current_block_->GetLastInstruction());
3679 break;
3680 }
3681
3682 case Instruction::CONST_METHOD_HANDLE: {
3683 uint16_t method_handle_idx = instruction.VRegB_21c();
3684 BuildLoadMethodHandle(method_handle_idx, dex_pc);
3685 UpdateLocal(instruction.VRegA_21c(), current_block_->GetLastInstruction());
3686 break;
3687 }
3688
3689 case Instruction::CONST_METHOD_TYPE: {
3690 dex::ProtoIndex proto_idx(instruction.VRegB_21c());
3691 BuildLoadMethodType(proto_idx, dex_pc);
3692 UpdateLocal(instruction.VRegA_21c(), current_block_->GetLastInstruction());
3693 break;
3694 }
3695
3696 case Instruction::MOVE_EXCEPTION: {
3697 AppendInstruction(new (allocator_) HLoadException(dex_pc));
3698 UpdateLocal(instruction.VRegA_11x(), current_block_->GetLastInstruction());
3699 AppendInstruction(new (allocator_) HClearException(dex_pc));
3700 break;
3701 }
3702
3703 case Instruction::THROW: {
3704 HInstruction* exception = LoadLocal(instruction.VRegA_11x(), DataType::Type::kReference);
3705 AppendInstruction(new (allocator_) HThrow(exception, dex_pc));
3706 // We finished building this block. Set the current block to null to avoid
3707 // adding dead instructions to it.
3708 current_block_ = nullptr;
3709 break;
3710 }
3711
3712 case Instruction::INSTANCE_OF: {
3713 uint8_t destination = instruction.VRegA_22c();
3714 uint8_t reference = instruction.VRegB_22c();
3715 dex::TypeIndex type_index(instruction.VRegC_22c());
3716 BuildTypeCheck(instruction, destination, reference, type_index, dex_pc);
3717 break;
3718 }
3719
3720 case Instruction::CHECK_CAST: {
3721 uint8_t reference = instruction.VRegA_21c();
3722 dex::TypeIndex type_index(instruction.VRegB_21c());
3723 BuildTypeCheck(instruction, -1, reference, type_index, dex_pc);
3724 break;
3725 }
3726
3727 case Instruction::MONITOR_ENTER: {
3728 AppendInstruction(new (allocator_) HMonitorOperation(
3729 LoadLocal(instruction.VRegA_11x(), DataType::Type::kReference),
3730 HMonitorOperation::OperationKind::kEnter,
3731 dex_pc));
3732 graph_->SetHasMonitorOperations(true);
3733 break;
3734 }
3735
3736 case Instruction::MONITOR_EXIT: {
3737 AppendInstruction(new (allocator_) HMonitorOperation(
3738 LoadLocal(instruction.VRegA_11x(), DataType::Type::kReference),
3739 HMonitorOperation::OperationKind::kExit,
3740 dex_pc));
3741 graph_->SetHasMonitorOperations(true);
3742 break;
3743 }
3744
3745 case Instruction::SPARSE_SWITCH:
3746 case Instruction::PACKED_SWITCH: {
3747 BuildSwitch(instruction, dex_pc);
3748 break;
3749 }
3750
3751 case Instruction::UNUSED_3E ... Instruction::UNUSED_43:
3752 case Instruction::UNUSED_73:
3753 case Instruction::UNUSED_79:
3754 case Instruction::UNUSED_7A:
3755 case Instruction::UNUSED_E3 ... Instruction::UNUSED_F9: {
3756 VLOG(compiler) << "Did not compile "
3757 << dex_file_->PrettyMethod(dex_compilation_unit_->GetDexMethodIndex())
3758 << " because of unhandled instruction "
3759 << instruction.Name();
3760 MaybeRecordStat(compilation_stats_,
3761 MethodCompilationStat::kNotCompiledUnhandledInstruction);
3762 return false;
3763 }
3764 }
3765 return true;
3766 } // NOLINT(readability/fn_size)
3767
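// Looks up a type that may already be resolved without triggering resolution; returns null
// if the type has not been resolved yet.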
3768 ObjPtr<mirror::Class> HInstructionBuilder::LookupResolvedType(
3769 dex::TypeIndex type_index,
3770 const DexCompilationUnit& compilation_unit) const {
3771 return compilation_unit.GetClassLinker()->LookupResolvedType(
3772 type_index, compilation_unit.GetDexCache().Get(), compilation_unit.GetClassLoader().Get());
3773 }
3774
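// Looks up the class declaring the method being compiled.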
3775 ObjPtr<mirror::Class> HInstructionBuilder::LookupReferrerClass() const {
3776 // TODO: Cache the result in a Handle<mirror::Class>.
3777 const dex::MethodId& method_id =
3778 dex_compilation_unit_->GetDexFile()->GetMethodId(dex_compilation_unit_->GetDexMethodIndex());
3779 return LookupResolvedType(method_id.class_idx_, *dex_compilation_unit_);
3780 }
3781
3782 } // namespace art
3783