/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "inliner.h"

#include "art_method-inl.h"
#include "builder.h"
#include "class_linker.h"
#include "constant_folding.h"
#include "dead_code_elimination.h"
#include "dex/verified_method.h"
#include "dex/verification_results.h"
#include "driver/compiler_driver-inl.h"
#include "driver/compiler_options.h"
#include "driver/dex_compilation_unit.h"
#include "instruction_simplifier.h"
#include "intrinsics.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "mirror/class_loader.h"
#include "mirror/dex_cache.h"
#include "nodes.h"
#include "optimizing_compiler.h"
#include "reference_type_propagation.h"
#include "register_allocator.h"
#include "quick/inline_method_analyser.h"
#include "sharpening.h"
#include "ssa_builder.h"
#include "ssa_phi_elimination.h"
#include "scoped_thread_state_change.h"
#include "thread.h"

namespace art {
static constexpr size_t kMaximumNumberOfHInstructions = 32;

// Limit the number of dex registers that we accumulate while inlining
// to avoid creating a large number of nested environments.
static constexpr size_t kMaximumNumberOfCumulatedDexRegisters = 64;

// Avoid inlining within a huge method due to memory pressure.
static constexpr size_t kMaximumCodeUnitSize = 4096;

void HInliner::Run() {
  const CompilerOptions& compiler_options = compiler_driver_->GetCompilerOptions();
  if ((compiler_options.GetInlineDepthLimit() == 0)
      || (compiler_options.GetInlineMaxCodeUnits() == 0)) {
    return;
  }
  if (caller_compilation_unit_.GetCodeItem()->insns_size_in_code_units_ > kMaximumCodeUnitSize) {
    return;
  }
  if (graph_->IsDebuggable()) {
    // For simplicity, we currently never inline when the graph is debuggable. This avoids
    // doing some logic in the runtime to discover if a method could have been inlined.
    return;
  }
  const ArenaVector<HBasicBlock*>& blocks = graph_->GetReversePostOrder();
  DCHECK(!blocks.empty());
  HBasicBlock* next_block = blocks[0];
  for (size_t i = 0; i < blocks.size(); ++i) {
    // Because we are changing the graph when inlining, we need to remember the next block.
    // This avoids doing the inlining work again on the inlined blocks.
    if (blocks[i] != next_block) {
      continue;
    }
    HBasicBlock* block = next_block;
    next_block = (i == blocks.size() - 1) ? nullptr : blocks[i + 1];
    for (HInstruction* instruction = block->GetFirstInstruction(); instruction != nullptr;) {
      HInstruction* next = instruction->GetNext();
      HInvoke* call = instruction->AsInvoke();
      // As long as the call is not intrinsified, it is worth trying to inline.
      if (call != nullptr && call->GetIntrinsic() == Intrinsics::kNone) {
        // We use the original invoke type to ensure the resolution of the called method
        // works properly.
        if (!TryInline(call)) {
          if (kIsDebugBuild && IsCompilingWithCoreImage()) {
            std::string callee_name =
                PrettyMethod(call->GetDexMethodIndex(), *outer_compilation_unit_.GetDexFile());
            bool should_inline = callee_name.find("$inline$") != std::string::npos;
            CHECK(!should_inline) << "Could not inline " << callee_name;
          }
        } else {
          if (kIsDebugBuild && IsCompilingWithCoreImage()) {
            std::string callee_name =
                PrettyMethod(call->GetDexMethodIndex(), *outer_compilation_unit_.GetDexFile());
            bool must_not_inline = callee_name.find("$noinline$") != std::string::npos;
            CHECK(!must_not_inline) << "Should not have inlined " << callee_name;
          }
        }
      }
      instruction = next;
    }
  }
}

static bool IsMethodOrDeclaringClassFinal(ArtMethod* method)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  return method->IsFinal() || method->GetDeclaringClass()->IsFinal();
}

/**
 * Given the `resolved_method` looked up in the dex cache, try to find
 * the actual runtime target of an interface or virtual call.
 * Return nullptr if the runtime target cannot be proven.
 */
static ArtMethod* FindVirtualOrInterfaceTarget(HInvoke* invoke, ArtMethod* resolved_method)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  if (IsMethodOrDeclaringClassFinal(resolved_method)) {
    // No need to look up further, the resolved method will be the target.
    return resolved_method;
  }

  HInstruction* receiver = invoke->InputAt(0);
  if (receiver->IsNullCheck()) {
    // Due to multiple levels of inlining within the same pass, it might be that the
    // null check does not have the reference type of the actual receiver.
    receiver = receiver->InputAt(0);
  }
  ReferenceTypeInfo info = receiver->GetReferenceTypeInfo();
  DCHECK(info.IsValid()) << "Invalid RTI for " << receiver->DebugName();
  if (!info.IsExact()) {
    // We currently only support inlining with known receivers.
    // TODO: Remove this check, we should be able to inline final methods
    // on unknown receivers.
    return nullptr;
  } else if (info.GetTypeHandle()->IsInterface()) {
    // Statically knowing that the receiver has an interface type cannot
    // help us find the target method.
    return nullptr;
  } else if (!resolved_method->GetDeclaringClass()->IsAssignableFrom(info.GetTypeHandle().Get())) {
    // The method that we're trying to call is not in the receiver's class or super classes.
    return nullptr;
  } else if (info.GetTypeHandle()->IsErroneous()) {
    // If the type is erroneous, do not go further, as we are going to query the vtable
    // or IMT, which we can only safely do on non-erroneous classes.
    return nullptr;
  }

  ClassLinker* cl = Runtime::Current()->GetClassLinker();
  size_t pointer_size = cl->GetImagePointerSize();
  if (invoke->IsInvokeInterface()) {
    resolved_method = info.GetTypeHandle()->FindVirtualMethodForInterface(
        resolved_method, pointer_size);
  } else {
    DCHECK(invoke->IsInvokeVirtual());
    resolved_method = info.GetTypeHandle()->FindVirtualMethodForVirtual(
        resolved_method, pointer_size);
  }

  if (resolved_method == nullptr) {
    // The information we had on the receiver was not enough to find
    // the target method. Since we check above the exact type of the receiver,
    // the only reason this can happen is an IncompatibleClassChangeError.
    return nullptr;
  } else if (!resolved_method->IsInvokable()) {
    // The method we found is not invokable (e.g. it is abstract), so calling it
    // would throw; do not consider it a valid inlining target.
    return nullptr;
  } else if (IsMethodOrDeclaringClassFinal(resolved_method)) {
    // A final method has to be the target method.
    return resolved_method;
  } else if (info.IsExact()) {
    // If we found a method and the receiver's concrete type is statically
    // known, we know for sure the target.
    return resolved_method;
  } else {
    // Even if we did find a method, the receiver type was not enough to
    // statically find the runtime target.
    return nullptr;
  }
}

static uint32_t FindClassIndexIn(mirror::Class* cls,
                                 const DexFile& dex_file,
                                 Handle<mirror::DexCache> dex_cache)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  uint32_t index = DexFile::kDexNoIndex;
  if (cls->GetDexCache() == nullptr) {
    DCHECK(cls->IsArrayClass()) << PrettyClass(cls);
    index = cls->FindTypeIndexInOtherDexFile(dex_file);
  } else if (cls->GetDexTypeIndex() == DexFile::kDexNoIndex16) {
    DCHECK(cls->IsProxyClass()) << PrettyClass(cls);
    // TODO: deal with proxy classes.
  } else if (IsSameDexFile(cls->GetDexFile(), dex_file)) {
    DCHECK_EQ(cls->GetDexCache(), dex_cache.Get());
    index = cls->GetDexTypeIndex();
    // Update the dex cache to ensure the class is resolved in it, since the generated
    // code will assume it is. We make this safe by updating the dex cache here, as other
    // dex files might also load the class, and there is no guarantee that the dex cache
    // of the class's own dex file will be updated.
    if (dex_cache->GetResolvedType(index) == nullptr) {
      dex_cache->SetResolvedType(index, cls);
    }
  } else {
    index = cls->FindTypeIndexInOtherDexFile(dex_file);
    // We cannot guarantee the entry in the dex cache will resolve to the same class,
    // as there may be different class loaders. So only return the index if it's
    // already the right class in the dex cache.
    if (index != DexFile::kDexNoIndex && dex_cache->GetResolvedType(index) != cls) {
      index = DexFile::kDexNoIndex;
    }
  }

  return index;
}

class ScopedProfilingInfoInlineUse {
 public:
  explicit ScopedProfilingInfoInlineUse(ArtMethod* method, Thread* self)
      : method_(method),
        self_(self),
        // Fetch the profiling info ahead of using it. If it's null when fetching,
        // we should not call JitCodeCache::DoneCompilerUse.
        profiling_info_(
            Runtime::Current()->GetJit()->GetCodeCache()->NotifyCompilerUse(method, self)) {
  }

  ~ScopedProfilingInfoInlineUse() {
    if (profiling_info_ != nullptr) {
      size_t pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
      DCHECK_EQ(profiling_info_, method_->GetProfilingInfo(pointer_size));
      Runtime::Current()->GetJit()->GetCodeCache()->DoneCompilerUse(method_, self_);
    }
  }

  ProfilingInfo* GetProfilingInfo() const { return profiling_info_; }

 private:
  ArtMethod* const method_;
  Thread* const self_;
  ProfilingInfo* const profiling_info_;
};

bool HInliner::TryInline(HInvoke* invoke_instruction) {
  if (invoke_instruction->IsInvokeUnresolved()) {
    return false;  // Don't bother to move further if we know the method is unresolved.
  }

  uint32_t method_index = invoke_instruction->GetDexMethodIndex();
  ScopedObjectAccess soa(Thread::Current());
  const DexFile& caller_dex_file = *caller_compilation_unit_.GetDexFile();
  VLOG(compiler) << "Try inlining " << PrettyMethod(method_index, caller_dex_file);

  ClassLinker* class_linker = caller_compilation_unit_.GetClassLinker();
  // We can query the dex cache directly. The verifier has populated it already.
  ArtMethod* resolved_method;
  ArtMethod* actual_method = nullptr;
  if (invoke_instruction->IsInvokeStaticOrDirect()) {
    if (invoke_instruction->AsInvokeStaticOrDirect()->IsStringInit()) {
      VLOG(compiler) << "Not inlining a String.<init> method";
      return false;
    }
    MethodReference ref = invoke_instruction->AsInvokeStaticOrDirect()->GetTargetMethod();
    mirror::DexCache* const dex_cache = IsSameDexFile(caller_dex_file, *ref.dex_file)
        ? caller_compilation_unit_.GetDexCache().Get()
        : class_linker->FindDexCache(soa.Self(), *ref.dex_file);
    resolved_method = dex_cache->GetResolvedMethod(
        ref.dex_method_index, class_linker->GetImagePointerSize());
    // actual_method == resolved_method for direct or static calls.
    actual_method = resolved_method;
  } else {
    resolved_method = caller_compilation_unit_.GetDexCache().Get()->GetResolvedMethod(
        method_index, class_linker->GetImagePointerSize());
    if (resolved_method != nullptr) {
      // Check if we can statically find the method.
      actual_method = FindVirtualOrInterfaceTarget(invoke_instruction, resolved_method);
    }
  }

  if (resolved_method == nullptr) {
    // TODO: Can this still happen?
    // Method cannot be resolved if it is in another dex file we do not have access to.
    VLOG(compiler) << "Method cannot be resolved " << PrettyMethod(method_index, caller_dex_file);
    return false;
  }

  if (actual_method != nullptr) {
    bool result = TryInlineAndReplace(invoke_instruction, actual_method, /* do_rtp */ true);
    if (result && !invoke_instruction->IsInvokeStaticOrDirect()) {
      MaybeRecordStat(kInlinedInvokeVirtualOrInterface);
    }
    return result;
  }

  DCHECK(!invoke_instruction->IsInvokeStaticOrDirect());

  // Check if we can use an inline cache.
  ArtMethod* caller = graph_->GetArtMethod();
  if (Runtime::Current()->UseJitCompilation()) {
    // Under JIT, we should always know the caller.
    DCHECK(caller != nullptr);
    ScopedProfilingInfoInlineUse spiis(caller, soa.Self());
    ProfilingInfo* profiling_info = spiis.GetProfilingInfo();
    if (profiling_info != nullptr) {
      const InlineCache& ic = *profiling_info->GetInlineCache(invoke_instruction->GetDexPc());
      if (ic.IsUninitialized()) {
        VLOG(compiler) << "Interface or virtual call to "
                       << PrettyMethod(method_index, caller_dex_file)
                       << " is not hit and not inlined";
        return false;
      } else if (ic.IsMonomorphic()) {
        MaybeRecordStat(kMonomorphicCall);
        if (outermost_graph_->IsCompilingOsr()) {
          // If we are compiling OSR, we pretend this call is polymorphic, as we may come from the
          // interpreter and it may have seen different receiver types.
          return TryInlinePolymorphicCall(invoke_instruction, resolved_method, ic);
        } else {
          return TryInlineMonomorphicCall(invoke_instruction, resolved_method, ic);
        }
      } else if (ic.IsPolymorphic()) {
        MaybeRecordStat(kPolymorphicCall);
        return TryInlinePolymorphicCall(invoke_instruction, resolved_method, ic);
      } else {
        DCHECK(ic.IsMegamorphic());
        VLOG(compiler) << "Interface or virtual call to "
                       << PrettyMethod(method_index, caller_dex_file)
                       << " is megamorphic and not inlined";
        MaybeRecordStat(kMegamorphicCall);
        return false;
      }
    }
  }

  VLOG(compiler) << "Interface or virtual call to "
                 << PrettyMethod(method_index, caller_dex_file)
                 << " could not be statically determined";
  return false;
}

HInstanceFieldGet* HInliner::BuildGetReceiverClass(ClassLinker* class_linker,
                                                   HInstruction* receiver,
                                                   uint32_t dex_pc) const {
  ArtField* field = class_linker->GetClassRoot(ClassLinker::kJavaLangObject)->GetInstanceField(0);
  DCHECK_EQ(std::string(field->GetName()), "shadow$_klass_");
  HInstanceFieldGet* result = new (graph_->GetArena()) HInstanceFieldGet(
      receiver,
      Primitive::kPrimNot,
      field->GetOffset(),
      field->IsVolatile(),
      field->GetDexFieldIndex(),
      field->GetDeclaringClass()->GetDexClassDefIndex(),
      *field->GetDexFile(),
      handles_->NewHandle(field->GetDexCache()),
      dex_pc);
  // The class of an object is effectively final, so this load has no memory dependencies.
  result->SetSideEffects(SideEffects::None());
  return result;
}

bool HInliner::TryInlineMonomorphicCall(HInvoke* invoke_instruction,
                                        ArtMethod* resolved_method,
                                        const InlineCache& ic) {
  DCHECK(invoke_instruction->IsInvokeVirtual() || invoke_instruction->IsInvokeInterface())
      << invoke_instruction->DebugName();

  const DexFile& caller_dex_file = *caller_compilation_unit_.GetDexFile();
  uint32_t class_index = FindClassIndexIn(
      ic.GetMonomorphicType(), caller_dex_file, caller_compilation_unit_.GetDexCache());
  if (class_index == DexFile::kDexNoIndex) {
    VLOG(compiler) << "Call to " << PrettyMethod(resolved_method)
                   << " from inline cache is not inlined because its class is not"
                   << " accessible to the caller";
    return false;
  }

  ClassLinker* class_linker = caller_compilation_unit_.GetClassLinker();
  size_t pointer_size = class_linker->GetImagePointerSize();
  if (invoke_instruction->IsInvokeInterface()) {
    resolved_method = ic.GetMonomorphicType()->FindVirtualMethodForInterface(
        resolved_method, pointer_size);
  } else {
    DCHECK(invoke_instruction->IsInvokeVirtual());
    resolved_method = ic.GetMonomorphicType()->FindVirtualMethodForVirtual(
        resolved_method, pointer_size);
  }
  DCHECK(resolved_method != nullptr);
  HInstruction* receiver = invoke_instruction->InputAt(0);
  HInstruction* cursor = invoke_instruction->GetPrevious();
  HBasicBlock* bb_cursor = invoke_instruction->GetBlock();

  if (!TryInlineAndReplace(invoke_instruction, resolved_method, /* do_rtp */ false)) {
    return false;
  }

  // We successfully inlined, now add a guard.
  bool is_referrer =
      (ic.GetMonomorphicType() == outermost_graph_->GetArtMethod()->GetDeclaringClass());
  AddTypeGuard(receiver,
               cursor,
               bb_cursor,
               class_index,
               is_referrer,
               invoke_instruction,
               /* with_deoptimization */ true);

  // Run type propagation to get the guard typed, and possibly propagate the
  // type of the receiver.
  ReferenceTypePropagation rtp_fixup(graph_,
                                     outer_compilation_unit_.GetDexCache(),
                                     handles_,
                                     /* is_first_run */ false);
  rtp_fixup.Run();

  MaybeRecordStat(kInlinedMonomorphicCall);
  return true;
}

HInstruction* HInliner::AddTypeGuard(HInstruction* receiver,
                                     HInstruction* cursor,
                                     HBasicBlock* bb_cursor,
                                     uint32_t class_index,
                                     bool is_referrer,
                                     HInstruction* invoke_instruction,
                                     bool with_deoptimization) {
  ClassLinker* class_linker = caller_compilation_unit_.GetClassLinker();
  HInstanceFieldGet* receiver_class = BuildGetReceiverClass(
      class_linker, receiver, invoke_instruction->GetDexPc());

  const DexFile& caller_dex_file = *caller_compilation_unit_.GetDexFile();
  // Note that we will just compare the classes, so we don't need Java semantics access checks.
  // Also, the caller of `AddTypeGuard` must have guaranteed that the class is in the dex cache.
  HLoadClass* load_class = new (graph_->GetArena()) HLoadClass(graph_->GetCurrentMethod(),
                                                               class_index,
                                                               caller_dex_file,
                                                               is_referrer,
                                                               invoke_instruction->GetDexPc(),
                                                               /* needs_access_check */ false,
                                                               /* is_in_dex_cache */ true);

  HNotEqual* compare = new (graph_->GetArena()) HNotEqual(load_class, receiver_class);
  // TODO: Extend reference type propagation to understand the guard.
  if (cursor != nullptr) {
    bb_cursor->InsertInstructionAfter(receiver_class, cursor);
  } else {
    bb_cursor->InsertInstructionBefore(receiver_class, bb_cursor->GetFirstInstruction());
  }
  bb_cursor->InsertInstructionAfter(load_class, receiver_class);
  bb_cursor->InsertInstructionAfter(compare, load_class);
  if (with_deoptimization) {
    HDeoptimize* deoptimize = new (graph_->GetArena()) HDeoptimize(
        compare, invoke_instruction->GetDexPc());
    bb_cursor->InsertInstructionAfter(deoptimize, compare);
    deoptimize->CopyEnvironmentFrom(invoke_instruction->GetEnvironment());
  }
  return compare;
}

bool HInliner::TryInlinePolymorphicCall(HInvoke* invoke_instruction,
                                        ArtMethod* resolved_method,
                                        const InlineCache& ic) {
  DCHECK(invoke_instruction->IsInvokeVirtual() || invoke_instruction->IsInvokeInterface())
      << invoke_instruction->DebugName();

  if (TryInlinePolymorphicCallToSameTarget(invoke_instruction, resolved_method, ic)) {
    return true;
  }

  ClassLinker* class_linker = caller_compilation_unit_.GetClassLinker();
  size_t pointer_size = class_linker->GetImagePointerSize();
  const DexFile& caller_dex_file = *caller_compilation_unit_.GetDexFile();

  bool all_targets_inlined = true;
  bool one_target_inlined = false;
  for (size_t i = 0; i < InlineCache::kIndividualCacheSize; ++i) {
    if (ic.GetTypeAt(i) == nullptr) {
      break;
    }
    ArtMethod* method = nullptr;
    if (invoke_instruction->IsInvokeInterface()) {
      method = ic.GetTypeAt(i)->FindVirtualMethodForInterface(
          resolved_method, pointer_size);
    } else {
      DCHECK(invoke_instruction->IsInvokeVirtual());
      method = ic.GetTypeAt(i)->FindVirtualMethodForVirtual(
          resolved_method, pointer_size);
    }

    HInstruction* receiver = invoke_instruction->InputAt(0);
    HInstruction* cursor = invoke_instruction->GetPrevious();
    HBasicBlock* bb_cursor = invoke_instruction->GetBlock();

    uint32_t class_index = FindClassIndexIn(
        ic.GetTypeAt(i), caller_dex_file, caller_compilation_unit_.GetDexCache());
    HInstruction* return_replacement = nullptr;
    if (class_index == DexFile::kDexNoIndex ||
        !TryBuildAndInline(invoke_instruction, method, &return_replacement)) {
      all_targets_inlined = false;
    } else {
      one_target_inlined = true;
      bool is_referrer = (ic.GetTypeAt(i) == outermost_graph_->GetArtMethod()->GetDeclaringClass());

      // If we have inlined all targets before, and this receiver is the last seen,
      // we deoptimize instead of keeping the original invoke instruction.
      bool deoptimize = all_targets_inlined &&
          (i != InlineCache::kIndividualCacheSize - 1) &&
          (ic.GetTypeAt(i + 1) == nullptr);

      if (outermost_graph_->IsCompilingOsr()) {
        // We do not support HDeoptimize in OSR methods.
        deoptimize = false;
      }
      HInstruction* compare = AddTypeGuard(
          receiver, cursor, bb_cursor, class_index, is_referrer, invoke_instruction, deoptimize);
      if (deoptimize) {
        if (return_replacement != nullptr) {
          invoke_instruction->ReplaceWith(return_replacement);
        }
        invoke_instruction->GetBlock()->RemoveInstruction(invoke_instruction);
        // Because the inline cache data can be populated concurrently, we force the end of the
        // iteration. Otherwise, we could see a new receiver type.
        break;
      } else {
        CreateDiamondPatternForPolymorphicInline(compare, return_replacement, invoke_instruction);
      }
    }
  }

  if (!one_target_inlined) {
    VLOG(compiler) << "Call to " << PrettyMethod(resolved_method)
                   << " from inline cache is not inlined because none"
                   << " of its targets could be inlined";
    return false;
  }
  MaybeRecordStat(kInlinedPolymorphicCall);

  // Run type propagation to get the guards typed.
  ReferenceTypePropagation rtp_fixup(graph_,
                                     outer_compilation_unit_.GetDexCache(),
                                     handles_,
                                     /* is_first_run */ false);
  rtp_fixup.Run();
  return true;
}

void HInliner::CreateDiamondPatternForPolymorphicInline(HInstruction* compare,
                                                        HInstruction* return_replacement,
                                                        HInstruction* invoke_instruction) {
  uint32_t dex_pc = invoke_instruction->GetDexPc();
  HBasicBlock* cursor_block = compare->GetBlock();
  HBasicBlock* original_invoke_block = invoke_instruction->GetBlock();
  ArenaAllocator* allocator = graph_->GetArena();

  // Split the block after the compare: `cursor_block` will now be the start of the diamond,
  // and the returned block is the start of the then branch (that could contain multiple blocks).
  HBasicBlock* then = cursor_block->SplitAfterForInlining(compare);

  // Split the block containing the invoke before and after the invoke. The returned block
  // of the split before will contain the invoke and will be the otherwise branch of
  // the diamond. The returned block of the split after will be the merge block
  // of the diamond.
  HBasicBlock* end_then = invoke_instruction->GetBlock();
  HBasicBlock* otherwise = end_then->SplitBeforeForInlining(invoke_instruction);
  HBasicBlock* merge = otherwise->SplitAfterForInlining(invoke_instruction);

  // If the methods we are inlining return a value, we create a phi in the merge block
  // that will have the `invoke_instruction` and the `return_replacement` as inputs.
  if (return_replacement != nullptr) {
    HPhi* phi = new (allocator) HPhi(
        allocator, kNoRegNumber, 0, HPhi::ToPhiType(invoke_instruction->GetType()), dex_pc);
    merge->AddPhi(phi);
    invoke_instruction->ReplaceWith(phi);
    phi->AddInput(return_replacement);
    phi->AddInput(invoke_instruction);
  }

  // Add the control flow instructions.
  otherwise->AddInstruction(new (allocator) HGoto(dex_pc));
  end_then->AddInstruction(new (allocator) HGoto(dex_pc));
  cursor_block->AddInstruction(new (allocator) HIf(compare, dex_pc));

  // Add the newly created blocks to the graph.
  graph_->AddBlock(then);
  graph_->AddBlock(otherwise);
  graph_->AddBlock(merge);

  // Set up successor (and implicitly predecessor) relations.
  cursor_block->AddSuccessor(otherwise);
  cursor_block->AddSuccessor(then);
  end_then->AddSuccessor(merge);
  otherwise->AddSuccessor(merge);

  // Set up dominance information.
  then->SetDominator(cursor_block);
  cursor_block->AddDominatedBlock(then);
  otherwise->SetDominator(cursor_block);
  cursor_block->AddDominatedBlock(otherwise);
  merge->SetDominator(cursor_block);
  cursor_block->AddDominatedBlock(merge);

  // Update the reverse post order.
  size_t index = IndexOfElement(graph_->reverse_post_order_, cursor_block);
  MakeRoomFor(&graph_->reverse_post_order_, 1, index);
  graph_->reverse_post_order_[++index] = then;
  index = IndexOfElement(graph_->reverse_post_order_, end_then);
  MakeRoomFor(&graph_->reverse_post_order_, 2, index);
  graph_->reverse_post_order_[++index] = otherwise;
  graph_->reverse_post_order_[++index] = merge;

  graph_->UpdateLoopAndTryInformationOfNewBlock(
      then, original_invoke_block, /* replace_if_back_edge */ false);
  graph_->UpdateLoopAndTryInformationOfNewBlock(
      otherwise, original_invoke_block, /* replace_if_back_edge */ false);

  // In case the original invoke location was a back edge, we need to update
  // the loop to now have the merge block as a back edge.
  graph_->UpdateLoopAndTryInformationOfNewBlock(
      merge, original_invoke_block, /* replace_if_back_edge */ true);
}

bool HInliner::TryInlinePolymorphicCallToSameTarget(HInvoke* invoke_instruction,
                                                    ArtMethod* resolved_method,
                                                    const InlineCache& ic) {
  // This optimization only works under JIT for now.
  DCHECK(Runtime::Current()->UseJitCompilation());
  if (graph_->GetInstructionSet() == kMips64) {
    // TODO: Support HClassTableGet for mips64.
    return false;
  }
  ClassLinker* class_linker = caller_compilation_unit_.GetClassLinker();
  size_t pointer_size = class_linker->GetImagePointerSize();

  DCHECK(resolved_method != nullptr);
  ArtMethod* actual_method = nullptr;
  size_t method_index = invoke_instruction->IsInvokeVirtual()
      ? invoke_instruction->AsInvokeVirtual()->GetVTableIndex()
      : invoke_instruction->AsInvokeInterface()->GetImtIndex();

  // Check whether we are actually calling the same method among
  // the different types seen.
  for (size_t i = 0; i < InlineCache::kIndividualCacheSize; ++i) {
    if (ic.GetTypeAt(i) == nullptr) {
      break;
    }
    ArtMethod* new_method = nullptr;
    if (invoke_instruction->IsInvokeInterface()) {
      new_method = ic.GetTypeAt(i)->GetEmbeddedImTableEntry(
          method_index % mirror::Class::kImtSize, pointer_size);
      if (new_method->IsRuntimeMethod()) {
        // Bail out as soon as we see a conflict trampoline in one of the targets'
        // interface tables.
        return false;
      }
    } else {
      DCHECK(invoke_instruction->IsInvokeVirtual());
      new_method = ic.GetTypeAt(i)->GetEmbeddedVTableEntry(method_index, pointer_size);
    }
    DCHECK(new_method != nullptr);
    if (actual_method == nullptr) {
      actual_method = new_method;
    } else if (actual_method != new_method) {
      // Different methods, bail out.
      VLOG(compiler) << "Call to " << PrettyMethod(resolved_method)
                     << " from inline cache is not inlined because it resolves"
                     << " to different methods";
      return false;
    }
  }

  HInstruction* receiver = invoke_instruction->InputAt(0);
  HInstruction* cursor = invoke_instruction->GetPrevious();
  HBasicBlock* bb_cursor = invoke_instruction->GetBlock();

  HInstruction* return_replacement = nullptr;
  if (!TryBuildAndInline(invoke_instruction, actual_method, &return_replacement)) {
    return false;
  }

  // We successfully inlined, now add a guard.
  HInstanceFieldGet* receiver_class = BuildGetReceiverClass(
      class_linker, receiver, invoke_instruction->GetDexPc());

  Primitive::Type type = Is64BitInstructionSet(graph_->GetInstructionSet())
      ? Primitive::kPrimLong
      : Primitive::kPrimInt;
  HClassTableGet* class_table_get = new (graph_->GetArena()) HClassTableGet(
      receiver_class,
      type,
      invoke_instruction->IsInvokeVirtual() ? HClassTableGet::TableKind::kVTable
                                            : HClassTableGet::TableKind::kIMTable,
      method_index,
      invoke_instruction->GetDexPc());

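  // Embed the expected ArtMethod* as a constant so the guard below can compare it
  // against the entry loaded from the receiver's vtable/IMT.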
  HConstant* constant;
  if (type == Primitive::kPrimLong) {
    constant = graph_->GetLongConstant(
        reinterpret_cast<intptr_t>(actual_method), invoke_instruction->GetDexPc());
  } else {
    constant = graph_->GetIntConstant(
        reinterpret_cast<intptr_t>(actual_method), invoke_instruction->GetDexPc());
  }

  HNotEqual* compare = new (graph_->GetArena()) HNotEqual(class_table_get, constant);
  if (cursor != nullptr) {
    bb_cursor->InsertInstructionAfter(receiver_class, cursor);
  } else {
    bb_cursor->InsertInstructionBefore(receiver_class, bb_cursor->GetFirstInstruction());
  }
  bb_cursor->InsertInstructionAfter(class_table_get, receiver_class);
  bb_cursor->InsertInstructionAfter(compare, class_table_get);

  if (outermost_graph_->IsCompilingOsr()) {
    CreateDiamondPatternForPolymorphicInline(compare, return_replacement, invoke_instruction);
  } else {
    // TODO: Extend reference type propagation to understand the guard.
    HDeoptimize* deoptimize = new (graph_->GetArena()) HDeoptimize(
        compare, invoke_instruction->GetDexPc());
    bb_cursor->InsertInstructionAfter(deoptimize, compare);
    deoptimize->CopyEnvironmentFrom(invoke_instruction->GetEnvironment());
    if (return_replacement != nullptr) {
      invoke_instruction->ReplaceWith(return_replacement);
    }
    invoke_instruction->GetBlock()->RemoveInstruction(invoke_instruction);
  }

  // Run type propagation to get the guard typed.
  ReferenceTypePropagation rtp_fixup(graph_,
                                     outer_compilation_unit_.GetDexCache(),
                                     handles_,
                                     /* is_first_run */ false);
  rtp_fixup.Run();

  MaybeRecordStat(kInlinedPolymorphicCall);

  return true;
}

bool HInliner::TryInlineAndReplace(HInvoke* invoke_instruction, ArtMethod* method, bool do_rtp) {
  HInstruction* return_replacement = nullptr;
  if (!TryBuildAndInline(invoke_instruction, method, &return_replacement)) {
    return false;
  }
  if (return_replacement != nullptr) {
    invoke_instruction->ReplaceWith(return_replacement);
  }
  invoke_instruction->GetBlock()->RemoveInstruction(invoke_instruction);
  FixUpReturnReferenceType(invoke_instruction, method, return_replacement, do_rtp);
  return true;
}

bool HInliner::TryBuildAndInline(HInvoke* invoke_instruction,
                                 ArtMethod* method,
                                 HInstruction** return_replacement) {
  if (method->IsProxyMethod()) {
    VLOG(compiler) << "Method " << PrettyMethod(method)
                   << " is not inlined because of unimplemented inline support for proxy methods.";
    return false;
  }

  // Check whether we're allowed to inline. The outermost compilation unit is the relevant
  // dex file here (though the transitivity of an inline chain would allow checking the caller).
  if (!compiler_driver_->MayInline(method->GetDexFile(),
                                   outer_compilation_unit_.GetDexFile())) {
    if (TryPatternSubstitution(invoke_instruction, method, return_replacement)) {
      VLOG(compiler) << "Successfully replaced pattern of invoke " << PrettyMethod(method);
      MaybeRecordStat(kReplacedInvokeWithSimplePattern);
      return true;
    }
    VLOG(compiler) << "Won't inline " << PrettyMethod(method) << " in "
                   << outer_compilation_unit_.GetDexFile()->GetLocation() << " ("
                   << caller_compilation_unit_.GetDexFile()->GetLocation() << ") from "
                   << method->GetDexFile()->GetLocation();
    return false;
  }

  bool same_dex_file = IsSameDexFile(*outer_compilation_unit_.GetDexFile(), *method->GetDexFile());

  const DexFile::CodeItem* code_item = method->GetCodeItem();

  if (code_item == nullptr) {
    VLOG(compiler) << "Method " << PrettyMethod(method)
                   << " is not inlined because it is native";
    return false;
  }

  size_t inline_max_code_units = compiler_driver_->GetCompilerOptions().GetInlineMaxCodeUnits();
  if (code_item->insns_size_in_code_units_ > inline_max_code_units) {
    VLOG(compiler) << "Method " << PrettyMethod(method)
                   << " is too big to inline: "
                   << code_item->insns_size_in_code_units_
                   << " > "
                   << inline_max_code_units;
    return false;
  }

  if (code_item->tries_size_ != 0) {
    VLOG(compiler) << "Method " << PrettyMethod(method)
                   << " is not inlined because of try block";
    return false;
  }

  if (!method->IsCompilable()) {
    VLOG(compiler) << "Method " << PrettyMethod(method)
                   << " has soft failures unhandled by the compiler, so it cannot be inlined";
  }

  if (!method->GetDeclaringClass()->IsVerified()) {
    uint16_t class_def_idx = method->GetDeclaringClass()->GetDexClassDefIndex();
    if (Runtime::Current()->UseJitCompilation() ||
        !compiler_driver_->IsMethodVerifiedWithoutFailures(
            method->GetDexMethodIndex(), class_def_idx, *method->GetDexFile())) {
      VLOG(compiler) << "Method " << PrettyMethod(method)
                     << " couldn't be verified, so it cannot be inlined";
      return false;
    }
  }

  if (invoke_instruction->IsInvokeStaticOrDirect() &&
      invoke_instruction->AsInvokeStaticOrDirect()->IsStaticWithImplicitClinitCheck()) {
    // Case of a static method that cannot be inlined because it implicitly
    // requires an initialization check of its declaring class.
    VLOG(compiler) << "Method " << PrettyMethod(method)
                   << " is not inlined because it is static and requires a clinit"
                   << " check that cannot be emitted due to Dex cache limitations";
    return false;
  }

  if (!TryBuildAndInlineHelper(invoke_instruction, method, same_dex_file, return_replacement)) {
    return false;
  }

  VLOG(compiler) << "Successfully inlined " << PrettyMethod(method);
  MaybeRecordStat(kInlinedInvoke);
  return true;
}

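// Map a dex argument vreg index to the corresponding invoke input, skipping the
// extra vreg slot taken by each wide (64-bit) argument.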
static HInstruction* GetInvokeInputForArgVRegIndex(HInvoke* invoke_instruction,
                                                   size_t arg_vreg_index)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  size_t input_index = 0;
  for (size_t i = 0; i < arg_vreg_index; ++i, ++input_index) {
    DCHECK_LT(input_index, invoke_instruction->GetNumberOfArguments());
    if (Primitive::Is64BitType(invoke_instruction->InputAt(input_index)->GetType())) {
      ++i;
      DCHECK_NE(i, arg_vreg_index);
    }
  }
  DCHECK_LT(input_index, invoke_instruction->GetNumberOfArguments());
  return invoke_instruction->InputAt(input_index);
}

// Try to recognize known simple patterns and replace the invoke call with appropriate instructions.
bool HInliner::TryPatternSubstitution(HInvoke* invoke_instruction,
                                      ArtMethod* resolved_method,
                                      HInstruction** return_replacement) {
  InlineMethod inline_method;
  if (!InlineMethodAnalyser::AnalyseMethodCode(resolved_method, &inline_method)) {
    return false;
  }

  switch (inline_method.opcode) {
    case kInlineOpNop:
      DCHECK_EQ(invoke_instruction->GetType(), Primitive::kPrimVoid);
      *return_replacement = nullptr;
      break;
    case kInlineOpReturnArg:
      *return_replacement = GetInvokeInputForArgVRegIndex(invoke_instruction,
                                                          inline_method.d.return_data.arg);
      break;
    case kInlineOpNonWideConst:
      if (resolved_method->GetShorty()[0] == 'L') {
        DCHECK_EQ(inline_method.d.data, 0u);
        *return_replacement = graph_->GetNullConstant();
      } else {
        *return_replacement = graph_->GetIntConstant(static_cast<int32_t>(inline_method.d.data));
      }
      break;
    case kInlineOpIGet: {
      const InlineIGetIPutData& data = inline_method.d.ifield_data;
      if (data.method_is_static || data.object_arg != 0u) {
        // TODO: Needs null check.
        return false;
      }
      Handle<mirror::DexCache> dex_cache(handles_->NewHandle(resolved_method->GetDexCache()));
      HInstruction* obj = GetInvokeInputForArgVRegIndex(invoke_instruction, data.object_arg);
      HInstanceFieldGet* iget = CreateInstanceFieldGet(dex_cache, data.field_idx, obj);
      DCHECK_EQ(iget->GetFieldOffset().Uint32Value(), data.field_offset);
      DCHECK_EQ(iget->IsVolatile() ? 1u : 0u, data.is_volatile);
      invoke_instruction->GetBlock()->InsertInstructionBefore(iget, invoke_instruction);
      *return_replacement = iget;
      break;
    }
    case kInlineOpIPut: {
      const InlineIGetIPutData& data = inline_method.d.ifield_data;
      if (data.method_is_static || data.object_arg != 0u) {
        // TODO: Needs null check.
        return false;
      }
      Handle<mirror::DexCache> dex_cache(handles_->NewHandle(resolved_method->GetDexCache()));
      HInstruction* obj = GetInvokeInputForArgVRegIndex(invoke_instruction, data.object_arg);
      HInstruction* value = GetInvokeInputForArgVRegIndex(invoke_instruction, data.src_arg);
      HInstanceFieldSet* iput = CreateInstanceFieldSet(dex_cache, data.field_idx, obj, value);
      DCHECK_EQ(iput->GetFieldOffset().Uint32Value(), data.field_offset);
      DCHECK_EQ(iput->IsVolatile() ? 1u : 0u, data.is_volatile);
      invoke_instruction->GetBlock()->InsertInstructionBefore(iput, invoke_instruction);
      if (data.return_arg_plus1 != 0u) {
        size_t return_arg = data.return_arg_plus1 - 1u;
        *return_replacement = GetInvokeInputForArgVRegIndex(invoke_instruction, return_arg);
      }
      break;
    }
    case kInlineOpConstructor: {
      const InlineConstructorData& data = inline_method.d.constructor_data;
      // Copy the field indexes and argument indexes into arrays for easier processing.
      uint16_t iput_field_indexes[] = {
          data.iput0_field_index, data.iput1_field_index, data.iput2_field_index
      };
      uint16_t iput_args[] = { data.iput0_arg, data.iput1_arg, data.iput2_arg };
      static_assert(arraysize(iput_args) == arraysize(iput_field_indexes), "Size mismatch");
      // Count valid field indexes.
      size_t number_of_iputs = 0u;
      while (number_of_iputs != arraysize(iput_field_indexes) &&
          iput_field_indexes[number_of_iputs] != DexFile::kDexNoIndex16) {
        // Check that there are no duplicate valid field indexes.
        DCHECK_EQ(0, std::count(iput_field_indexes + number_of_iputs + 1,
                                iput_field_indexes + arraysize(iput_field_indexes),
                                iput_field_indexes[number_of_iputs]));
        ++number_of_iputs;
      }
      // Check that there are no valid field indexes in the rest of the array.
      DCHECK_EQ(0, std::count_if(iput_field_indexes + number_of_iputs,
                                 iput_field_indexes + arraysize(iput_field_indexes),
                                 [](uint16_t index) { return index != DexFile::kDexNoIndex16; }));

      // Create an HInstanceFieldSet for each IPUT that stores non-zero data.
      Handle<mirror::DexCache> dex_cache;
      HInstruction* obj = GetInvokeInputForArgVRegIndex(invoke_instruction, /* this */ 0u);
      bool needs_constructor_barrier = false;
      for (size_t i = 0; i != number_of_iputs; ++i) {
        HInstruction* value = GetInvokeInputForArgVRegIndex(invoke_instruction, iput_args[i]);
        if (!value->IsConstant() || !value->AsConstant()->IsZeroBitPattern()) {
          if (dex_cache.GetReference() == nullptr) {
            dex_cache = handles_->NewHandle(resolved_method->GetDexCache());
          }
          uint16_t field_index = iput_field_indexes[i];
          HInstanceFieldSet* iput = CreateInstanceFieldSet(dex_cache, field_index, obj, value);
          invoke_instruction->GetBlock()->InsertInstructionBefore(iput, invoke_instruction);

          // Check whether the field is final. If it is, we need to add a barrier.
          size_t pointer_size = InstructionSetPointerSize(codegen_->GetInstructionSet());
          ArtField* resolved_field = dex_cache->GetResolvedField(field_index, pointer_size);
          DCHECK(resolved_field != nullptr);
          if (resolved_field->IsFinal()) {
            needs_constructor_barrier = true;
          }
        }
      }
      if (needs_constructor_barrier) {
        HMemoryBarrier* barrier = new (graph_->GetArena()) HMemoryBarrier(kStoreStore, kNoDexPc);
        invoke_instruction->GetBlock()->InsertInstructionBefore(barrier, invoke_instruction);
      }
      *return_replacement = nullptr;
      break;
    }
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  return true;
}

HInstanceFieldGet* HInliner::CreateInstanceFieldGet(Handle<mirror::DexCache> dex_cache,
                                                    uint32_t field_index,
                                                    HInstruction* obj)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  size_t pointer_size = InstructionSetPointerSize(codegen_->GetInstructionSet());
  ArtField* resolved_field = dex_cache->GetResolvedField(field_index, pointer_size);
  DCHECK(resolved_field != nullptr);
  HInstanceFieldGet* iget = new (graph_->GetArena()) HInstanceFieldGet(
      obj,
      resolved_field->GetTypeAsPrimitiveType(),
      resolved_field->GetOffset(),
      resolved_field->IsVolatile(),
      field_index,
      resolved_field->GetDeclaringClass()->GetDexClassDefIndex(),
      *dex_cache->GetDexFile(),
      dex_cache,
      // Read barrier generates a runtime call in slow path and we need a valid
      // dex pc for the associated stack map. 0 is bogus but valid. Bug: 26854537.
      /* dex_pc */ 0);
  if (iget->GetType() == Primitive::kPrimNot) {
    // Use the same dex_cache that we used for field lookup as the hint_dex_cache.
    ReferenceTypePropagation rtp(graph_, dex_cache, handles_, /* is_first_run */ false);
    rtp.Visit(iget);
  }
  return iget;
}

HInstanceFieldSet* HInliner::CreateInstanceFieldSet(Handle<mirror::DexCache> dex_cache,
                                                    uint32_t field_index,
                                                    HInstruction* obj,
                                                    HInstruction* value)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  size_t pointer_size = InstructionSetPointerSize(codegen_->GetInstructionSet());
  ArtField* resolved_field = dex_cache->GetResolvedField(field_index, pointer_size);
  DCHECK(resolved_field != nullptr);
  HInstanceFieldSet* iput = new (graph_->GetArena()) HInstanceFieldSet(
      obj,
      value,
      resolved_field->GetTypeAsPrimitiveType(),
      resolved_field->GetOffset(),
      resolved_field->IsVolatile(),
      field_index,
      resolved_field->GetDeclaringClass()->GetDexClassDefIndex(),
      *dex_cache->GetDexFile(),
      dex_cache,
      // Read barrier generates a runtime call in slow path and we need a valid
      // dex pc for the associated stack map. 0 is bogus but valid. Bug: 26854537.
      /* dex_pc */ 0);
  return iput;
}

bool HInliner::TryBuildAndInlineHelper(HInvoke* invoke_instruction,
                                       ArtMethod* resolved_method,
                                       bool same_dex_file,
                                       HInstruction** return_replacement) {
  ScopedObjectAccess soa(Thread::Current());
  const DexFile::CodeItem* code_item = resolved_method->GetCodeItem();
  const DexFile& callee_dex_file = *resolved_method->GetDexFile();
  uint32_t method_index = resolved_method->GetDexMethodIndex();
  ClassLinker* class_linker = caller_compilation_unit_.GetClassLinker();
  Handle<mirror::DexCache> dex_cache(handles_->NewHandle(resolved_method->GetDexCache()));
  DexCompilationUnit dex_compilation_unit(
      caller_compilation_unit_.GetClassLoader(),
      class_linker,
      callee_dex_file,
      code_item,
      resolved_method->GetDeclaringClass()->GetDexClassDefIndex(),
      method_index,
      resolved_method->GetAccessFlags(),
      /* verified_method */ nullptr,
      dex_cache);

  bool requires_ctor_barrier = false;

  if (dex_compilation_unit.IsConstructor()) {
    // If it's a super invocation and the caller already generates a barrier, there's
    // no need to generate another one.
    // We identify super calls by looking at the "this" pointer: if its value is the
    // same as the local "this" pointer, then we must have a super invocation.
    bool is_super_invocation = invoke_instruction->InputAt(0)->IsParameterValue()
        && invoke_instruction->InputAt(0)->AsParameterValue()->IsThis();
    if (is_super_invocation && graph_->ShouldGenerateConstructorBarrier()) {
      requires_ctor_barrier = false;
    } else {
      Thread* self = Thread::Current();
      requires_ctor_barrier = compiler_driver_->RequiresConstructorBarrier(self,
          dex_compilation_unit.GetDexFile(),
          dex_compilation_unit.GetClassDefIndex());
    }
  }

  InvokeType invoke_type = invoke_instruction->GetOriginalInvokeType();
  if (invoke_type == kInterface) {
    // We have statically resolved the dispatch. To please the class linker
    // at runtime, we change this call as if it was a virtual call.
    invoke_type = kVirtual;
  }

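  // The callee graph starts numbering its instructions after the caller's current
  // instruction id, so ids remain unique once the callee is merged into the caller.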
  const int32_t caller_instruction_counter = graph_->GetCurrentInstructionId();
  HGraph* callee_graph = new (graph_->GetArena()) HGraph(
      graph_->GetArena(),
      callee_dex_file,
      method_index,
      requires_ctor_barrier,
      compiler_driver_->GetInstructionSet(),
      invoke_type,
      graph_->IsDebuggable(),
      /* osr */ false,
      caller_instruction_counter);
  callee_graph->SetArtMethod(resolved_method);

  // When they are needed, allocate `inline_stats` on the heap instead
  // of on the stack, as Clang might produce a stack frame too large
  // for this function, which would not fit the requirements of the
  // `-Wframe-larger-than` option.
  std::unique_ptr<OptimizingCompilerStats> inline_stats =
      (stats_ == nullptr) ? nullptr : MakeUnique<OptimizingCompilerStats>();
  HGraphBuilder builder(callee_graph,
                        &dex_compilation_unit,
                        &outer_compilation_unit_,
                        resolved_method->GetDexFile(),
                        *code_item,
                        compiler_driver_,
                        inline_stats.get(),
                        resolved_method->GetQuickenedInfo(),
                        dex_cache,
                        handles_);

  if (builder.BuildGraph() != kAnalysisSuccess) {
    VLOG(compiler) << "Method " << PrettyMethod(method_index, callee_dex_file)
                   << " could not be built, so cannot be inlined";
    return false;
  }

  if (!RegisterAllocator::CanAllocateRegistersFor(*callee_graph,
                                                  compiler_driver_->GetInstructionSet())) {
    VLOG(compiler) << "Method " << PrettyMethod(method_index, callee_dex_file)
                   << " cannot be inlined because of the register allocator";
    return false;
  }

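  // Substitute the invoke's actual arguments into the callee graph: constants are
  // propagated directly, and reference arguments carry over their type info and nullability.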
1118   size_t parameter_index = 0;
1119   for (HInstructionIterator instructions(callee_graph->GetEntryBlock()->GetInstructions());
1120        !instructions.Done();
1121        instructions.Advance()) {
1122     HInstruction* current = instructions.Current();
1123     if (current->IsParameterValue()) {
1124       HInstruction* argument = invoke_instruction->InputAt(parameter_index++);
1125       if (argument->IsNullConstant()) {
1126         current->ReplaceWith(callee_graph->GetNullConstant());
1127       } else if (argument->IsIntConstant()) {
1128         current->ReplaceWith(callee_graph->GetIntConstant(argument->AsIntConstant()->GetValue()));
1129       } else if (argument->IsLongConstant()) {
1130         current->ReplaceWith(callee_graph->GetLongConstant(argument->AsLongConstant()->GetValue()));
1131       } else if (argument->IsFloatConstant()) {
1132         current->ReplaceWith(
1133             callee_graph->GetFloatConstant(argument->AsFloatConstant()->GetValue()));
1134       } else if (argument->IsDoubleConstant()) {
1135         current->ReplaceWith(
1136             callee_graph->GetDoubleConstant(argument->AsDoubleConstant()->GetValue()));
1137       } else if (argument->GetType() == Primitive::kPrimNot) {
1138         current->SetReferenceTypeInfo(argument->GetReferenceTypeInfo());
1139         current->AsParameterValue()->SetCanBeNull(argument->CanBeNull());
1140       }
1141     }
1142   }
1143 
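  // The budget starts at kMaximumNumberOfHInstructions and is credited with the
  // instructions that nested inlining (in RunOptimizations below) adds to the
  // callee graph, so a callee is not penalized for code inlined into it.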
1144   size_t number_of_instructions_budget = kMaximumNumberOfHInstructions;
1145   size_t number_of_inlined_instructions =
1146       RunOptimizations(callee_graph, code_item, dex_compilation_unit);
1147   number_of_instructions_budget += number_of_inlined_instructions;
1148 
1149   // TODO: We should abort only if all predecessors throw. However,
1150   // HGraph::InlineInto currently does not handle an exit block with
1151   // a throw predecessor.
1152   HBasicBlock* exit_block = callee_graph->GetExitBlock();
1153   if (exit_block == nullptr) {
1154     VLOG(compiler) << "Method " << PrettyMethod(method_index, callee_dex_file)
1155                    << " could not be inlined because it has an infinite loop";
1156     return false;
1157   }
1158 
1159   bool has_throw_predecessor = false;
1160   for (HBasicBlock* predecessor : exit_block->GetPredecessors()) {
1161     if (predecessor->GetLastInstruction()->IsThrow()) {
1162       has_throw_predecessor = true;
1163       break;
1164     }
1165   }
1166   if (has_throw_predecessor) {
1167     VLOG(compiler) << "Method " << PrettyMethod(method_index, callee_dex_file)
1168                    << " could not be inlined because one branch always throws";
1169     return false;
1170   }
1171 
1172   HReversePostOrderIterator it(*callee_graph);
1173   it.Advance();  // Skip the entry block; it does not contain instructions that prevent inlining.
1174   size_t number_of_instructions = 0;
1175 
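  // Instructions that need an environment record the dex registers of all
  // callers in the inline chain; once too many dex registers have accumulated
  // (kMaximumNumberOfCumulatedDexRegisters), such instructions block inlining.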
1176   bool can_inline_environment =
1177       total_number_of_dex_registers_ < kMaximumNumberOfCumulatedDexRegisters;
1178 
1179   for (; !it.Done(); it.Advance()) {
1180     HBasicBlock* block = it.Current();
1181 
1182     if (block->IsLoopHeader() && block->GetLoopInformation()->IsIrreducible()) {
1183       // Don't inline methods with irreducible loops, as they could prevent
1184       // some optimizations from running.
1185       VLOG(compiler) << "Method " << PrettyMethod(method_index, callee_dex_file)
1186                      << " could not be inlined because it contains an irreducible loop";
1187       return false;
1188     }
1189 
1190     for (HInstructionIterator instr_it(block->GetInstructions());
1191          !instr_it.Done();
1192          instr_it.Advance()) {
1193       if (number_of_instructions++ == number_of_instructions_budget) {
1194         VLOG(compiler) << "Method " << PrettyMethod(method_index, callee_dex_file)
1195                        << " is not inlined because its caller has reached"
1196                        << " its instruction budget limit.";
1197         return false;
1198       }
1199       HInstruction* current = instr_it.Current();
1200       if (!can_inline_environment && current->NeedsEnvironment()) {
1201         VLOG(compiler) << "Method " << PrettyMethod(method_index, callee_dex_file)
1202                        << " is not inlined because its caller has reached"
1203                        << " its environment budget limit.";
1204         return false;
1205       }
1206 
1207       if (current->IsInvokeInterface()) {
1208         // Disable inlining of interface calls. The cost of handling a
1209         // resolution conflict at runtime is currently too high.
1210         VLOG(compiler) << "Method " << PrettyMethod(method_index, callee_dex_file)
1211                        << " could not be inlined because it has an interface call.";
1212         return false;
1213       }
1214 
1215       if (!same_dex_file && current->NeedsEnvironment()) {
1216         VLOG(compiler) << "Method " << PrettyMethod(method_index, callee_dex_file)
1217                        << " could not be inlined because " << current->DebugName()
1218                        << " needs an environment and is in a different dex file";
1219         return false;
1220       }
1221 
1222       if (!same_dex_file && current->NeedsDexCacheOfDeclaringClass()) {
1223         VLOG(compiler) << "Method " << PrettyMethod(method_index, callee_dex_file)
1224                        << " could not be inlined because " << current->DebugName()
1225                        << " is in a different dex file and requires access to the dex cache";
1226         return false;
1227       }
1228 
1229       if (current->IsNewInstance() &&
1230           (current->AsNewInstance()->GetEntrypoint() == kQuickAllocObjectWithAccessCheck)) {
1231         VLOG(compiler) << "Method " << PrettyMethod(method_index, callee_dex_file)
1232                        << " could not be inlined because it is using an entrypoint"
1233                        << " with access checks";
1234         // Allocation entrypoint does not handle inlined frames.
1235         return false;
1236       }
1237 
1238       if (current->IsNewArray() &&
1239           (current->AsNewArray()->GetEntrypoint() == kQuickAllocArrayWithAccessCheck)) {
1240         VLOG(compiler) << "Method " << PrettyMethod(method_index, callee_dex_file)
1241                        << " could not be inlined because it is using an entrypoint"
1242                        << " with access checks";
1243         // Allocation entrypoint does not handle inlined frames.
1244         return false;
1245       }
1246 
1247       if (current->IsUnresolvedStaticFieldGet() ||
1248           current->IsUnresolvedInstanceFieldGet() ||
1249           current->IsUnresolvedStaticFieldSet() ||
1250           current->IsUnresolvedInstanceFieldSet()) {
1251         // Entrypoint for unresolved fields does not handle inlined frames.
1252         VLOG(compiler) << "Method " << PrettyMethod(method_index, callee_dex_file)
1253                        << " could not be inlined because it uses an entrypoint"
1254                        << " for unresolved field accesses";
1255         return false;
1256       }
1257     }
1258   }
1259   number_of_inlined_instructions_ += number_of_instructions;
1260 
1261   DCHECK_EQ(caller_instruction_counter, graph_->GetCurrentInstructionId())
1262       << "No instructions can be added to the outer graph while the inner graph is being built";
1263 
1264   const int32_t callee_instruction_counter = callee_graph->GetCurrentInstructionId();
1265   graph_->SetCurrentInstructionId(callee_instruction_counter);
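  // InlineInto splices the callee blocks into the caller graph at the invoke
  // and yields the instruction producing the callee's return value (possibly a
  // Phi when several return paths are merged), which callers of this function
  // can then use in place of the invoke's result.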
1266   *return_replacement = callee_graph->InlineInto(graph_, invoke_instruction);
1267 
1268   DCHECK_EQ(callee_instruction_counter, callee_graph->GetCurrentInstructionId())
1269       << "No instructions can be added to the inner graph during inlining into the outer graph";
1270 
1271   return true;
1272 }
1273 
1274 size_t HInliner::RunOptimizations(HGraph* callee_graph,
1275                                   const DexFile::CodeItem* code_item,
1276                                   const DexCompilationUnit& dex_compilation_unit) {
1277   // Note: if the outermost_graph_ is being compiled for OSR, we should not run any
1278   // optimization that could lead to a HDeoptimize. The following optimizations do not.
1279   HDeadCodeElimination dce(callee_graph, stats_);
1280   HConstantFolding fold(callee_graph);
1281   HSharpening sharpening(callee_graph, codegen_, dex_compilation_unit, compiler_driver_);
1282   InstructionSimplifier simplify(callee_graph, stats_);
1283   IntrinsicsRecognizer intrinsics(callee_graph, compiler_driver_, stats_);
1284 
1285   HOptimization* optimizations[] = {
1286     &intrinsics,
1287     &sharpening,
1288     &simplify,
1289     &fold,
1290     &dce,
1291   };
1292 
1293   for (size_t i = 0; i < arraysize(optimizations); ++i) {
1294     HOptimization* optimization = optimizations[i];
1295     optimization->Run();
1296   }
1297 
1298   size_t number_of_inlined_instructions = 0u;
1299   if (depth_ + 1 < compiler_driver_->GetCompilerOptions().GetInlineDepthLimit()) {
1300     HInliner inliner(callee_graph,
1301                      outermost_graph_,
1302                      codegen_,
1303                      outer_compilation_unit_,
1304                      dex_compilation_unit,
1305                      compiler_driver_,
1306                      handles_,
1307                      stats_,
1308                      total_number_of_dex_registers_ + code_item->registers_size_,
1309                      depth_ + 1);
1310     inliner.Run();
1311     number_of_inlined_instructions += inliner.number_of_inlined_instructions_;
1312   }
1313 
1314   return number_of_inlined_instructions;
1315 }
1316 
1317 void HInliner::FixUpReturnReferenceType(HInvoke* invoke_instruction,
1318                                         ArtMethod* resolved_method,
1319                                         HInstruction* return_replacement,
1320                                         bool do_rtp) {
1321   // Check the integrity of reference types and run another type propagation if needed.
1322   if (return_replacement != nullptr) {
1323     if (return_replacement->GetType() == Primitive::kPrimNot) {
1324       if (!return_replacement->GetReferenceTypeInfo().IsValid()) {
1325         // Make sure that we have a valid type for the return. We may get an invalid one when
1326         // we inline invokes with multiple branches and create a Phi for the result.
1327         // TODO: we could be more precise by merging the phi inputs but that requires
1328         // some functionality from the reference type propagation.
1329         DCHECK(return_replacement->IsPhi());
1330         size_t pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
1331         mirror::Class* cls = resolved_method->GetReturnType(false /* resolve */, pointer_size);
1332         if (cls != nullptr && !cls->IsErroneous()) {
1333           ReferenceTypeInfo::TypeHandle return_handle = handles_->NewHandle(cls);
1334           return_replacement->SetReferenceTypeInfo(ReferenceTypeInfo::Create(
1335               return_handle, return_handle->CannotBeAssignedFromOtherTypes() /* is_exact */));
1336         } else {
1337           // Return inexact object type on failures.
1338           return_replacement->SetReferenceTypeInfo(graph_->GetInexactObjectRti());
1339         }
1340       }
1341 
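      // Illustrative (hypothetical) example: if the invoke is declared to
      // return CharSequence but the inlined body always returns a String, the
      // return type info is strictly more precise than the invoke's, so type
      // propagation is rerun to let users of the result see the tighter type.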
1342       if (do_rtp) {
1343         // If the return type is a refinement of the declared type, run the type propagation again.
1344         ReferenceTypeInfo return_rti = return_replacement->GetReferenceTypeInfo();
1345         ReferenceTypeInfo invoke_rti = invoke_instruction->GetReferenceTypeInfo();
1346         if (invoke_rti.IsStrictSupertypeOf(return_rti)
1347             || (return_rti.IsExact() && !invoke_rti.IsExact())
1348             || !return_replacement->CanBeNull()) {
1349           ReferenceTypePropagation(graph_,
1350                                    outer_compilation_unit_.GetDexCache(),
1351                                    handles_,
1352                                    /* is_first_run */ false).Run();
1353         }
1354       }
1355     } else if (return_replacement->IsInstanceOf()) {
1356       if (do_rtp) {
1357         // Inlining InstanceOf into an If may put a tighter bound on reference types.
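        // Illustrative (hypothetical) example: inlining a helper that is
        // effectively `return obj instanceof String;` and branching on its
        // result allows the true branch to bound the type of `obj` to String.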
1358         ReferenceTypePropagation(graph_,
1359                                  outer_compilation_unit_.GetDexCache(),
1360                                  handles_,
1361                                  /* is_first_run */ false).Run();
1362       }
1363     }
1364   }
1365 }
1366 
1367 }  // namespace art
1368