1 /*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "compiler_driver.h"
18
19 #include <unistd.h>
20 #include <unordered_set>
21 #include <vector>
22
23 #ifndef __APPLE__
24 #include <malloc.h> // For mallinfo
25 #endif
26
27 #include "android-base/strings.h"
28
29 #include "art_field-inl.h"
30 #include "art_method-inl.h"
31 #include "base/arena_allocator.h"
32 #include "base/array_ref.h"
33 #include "base/bit_vector.h"
34 #include "base/enums.h"
35 #include "base/logging.h" // For VLOG
36 #include "base/stl_util.h"
37 #include "base/systrace.h"
38 #include "base/time_utils.h"
39 #include "base/timing_logger.h"
40 #include "class_linker-inl.h"
41 #include "compiled_method-inl.h"
42 #include "compiler.h"
43 #include "compiler_callbacks.h"
44 #include "compiler_driver-inl.h"
45 #include "dex/descriptors_names.h"
46 #include "dex/dex_file-inl.h"
47 #include "dex/dex_file_annotations.h"
48 #include "dex/dex_instruction-inl.h"
49 #include "dex/dex_to_dex_compiler.h"
50 #include "dex/verification_results.h"
51 #include "dex/verified_method.h"
52 #include "dex_compilation_unit.h"
53 #include "driver/compiler_options.h"
54 #include "gc/accounting/card_table-inl.h"
55 #include "gc/accounting/heap_bitmap.h"
56 #include "gc/space/image_space.h"
57 #include "gc/space/space.h"
58 #include "handle_scope-inl.h"
59 #include "intrinsics_enum.h"
60 #include "jit/profile_compilation_info.h"
61 #include "jni_internal.h"
62 #include "linker/linker_patch.h"
63 #include "mirror/class-inl.h"
64 #include "mirror/class_loader.h"
65 #include "mirror/dex_cache-inl.h"
66 #include "mirror/object-inl.h"
67 #include "mirror/object-refvisitor-inl.h"
68 #include "mirror/object_array-inl.h"
69 #include "mirror/throwable.h"
70 #include "nativehelper/ScopedLocalRef.h"
71 #include "object_lock.h"
72 #include "runtime.h"
73 #include "runtime_intrinsics.h"
74 #include "scoped_thread_state_change-inl.h"
75 #include "thread.h"
76 #include "thread_list.h"
77 #include "thread_pool.h"
78 #include "trampolines/trampoline_compiler.h"
79 #include "transaction.h"
80 #include "utils/atomic_dex_ref_map-inl.h"
81 #include "utils/dex_cache_arrays_layout-inl.h"
82 #include "utils/swap_space.h"
83 #include "vdex_file.h"
84 #include "verifier/method_verifier-inl.h"
85 #include "verifier/method_verifier.h"
86 #include "verifier/verifier_deps.h"
87 #include "verifier/verifier_enums.h"
88
89 namespace art {
90
91 static constexpr bool kTimeCompileMethod = !kIsDebugBuild;
92
93 // Print additional info during profile guided compilation.
94 static constexpr bool kDebugProfileGuidedCompilation = false;
95
96 // Max encoded fields allowed for initializing app image. Hardcode the number for now
97 // because 5000 should be large enough.
98 static constexpr uint32_t kMaxEncodedFields = 5000;
99
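// Helpers for the statistics dump below: x counts the "resolved"/successful cases and y the
// remaining ones, so Percentage(x, y) reports x as a share of all (x + y) cases and DumpStat()
// logs nothing when there were no cases at all.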
100 static double Percentage(size_t x, size_t y) {
101 return 100.0 * (static_cast<double>(x)) / (static_cast<double>(x + y));
102 }
103
104 static void DumpStat(size_t x, size_t y, const char* str) {
105 if (x == 0 && y == 0) {
106 return;
107 }
108 LOG(INFO) << Percentage(x, y) << "% of " << str << " for " << (x + y) << " cases";
109 }
110
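// Counters for AOT resolution and devirtualization statistics. Updates take stats_lock_ in debug
// builds (see STATS_LOCK below) and may be lossy otherwise; Dump() is invoked from CompileAll()
// when stats dumping is enabled.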
111 class CompilerDriver::AOTCompilationStats {
112 public:
113 AOTCompilationStats()
114 : stats_lock_("AOT compilation statistics lock"),
115 resolved_types_(0), unresolved_types_(0),
116 resolved_instance_fields_(0), unresolved_instance_fields_(0),
117 resolved_local_static_fields_(0), resolved_static_fields_(0), unresolved_static_fields_(0),
118 type_based_devirtualization_(0),
119 safe_casts_(0), not_safe_casts_(0) {
120 for (size_t i = 0; i <= kMaxInvokeType; i++) {
121 resolved_methods_[i] = 0;
122 unresolved_methods_[i] = 0;
123 virtual_made_direct_[i] = 0;
124 direct_calls_to_boot_[i] = 0;
125 direct_methods_to_boot_[i] = 0;
126 }
127 }
128
129 void Dump() {
130 DumpStat(resolved_types_, unresolved_types_, "types resolved");
131 DumpStat(resolved_instance_fields_, unresolved_instance_fields_, "instance fields resolved");
132 DumpStat(resolved_local_static_fields_ + resolved_static_fields_, unresolved_static_fields_,
133 "static fields resolved");
134 DumpStat(resolved_local_static_fields_, resolved_static_fields_ + unresolved_static_fields_,
135 "static fields local to a class");
136 DumpStat(safe_casts_, not_safe_casts_, "check-casts removed based on type information");
137 // Note: the code below subtracts the stat value from the total so that, when added back to
138 // the stat value, we have 100% of samples. TODO: clean this up.
139 DumpStat(type_based_devirtualization_,
140 resolved_methods_[kVirtual] + unresolved_methods_[kVirtual] +
141 resolved_methods_[kInterface] + unresolved_methods_[kInterface] -
142 type_based_devirtualization_,
143 "virtual/interface calls made direct based on type information");
144
145 for (size_t i = 0; i <= kMaxInvokeType; i++) {
146 std::ostringstream oss;
147 oss << static_cast<InvokeType>(i) << " methods were AOT resolved";
148 DumpStat(resolved_methods_[i], unresolved_methods_[i], oss.str().c_str());
149 if (virtual_made_direct_[i] > 0) {
150 std::ostringstream oss2;
151 oss2 << static_cast<InvokeType>(i) << " methods made direct";
152 DumpStat(virtual_made_direct_[i],
153 resolved_methods_[i] + unresolved_methods_[i] - virtual_made_direct_[i],
154 oss2.str().c_str());
155 }
156 if (direct_calls_to_boot_[i] > 0) {
157 std::ostringstream oss2;
158 oss2 << static_cast<InvokeType>(i) << " method calls are direct into boot";
159 DumpStat(direct_calls_to_boot_[i],
160 resolved_methods_[i] + unresolved_methods_[i] - direct_calls_to_boot_[i],
161 oss2.str().c_str());
162 }
163 if (direct_methods_to_boot_[i] > 0) {
164 std::ostringstream oss2;
165 oss2 << static_cast<InvokeType>(i) << " method calls have methods in boot";
166 DumpStat(direct_methods_to_boot_[i],
167 resolved_methods_[i] + unresolved_methods_[i] - direct_methods_to_boot_[i],
168 oss2.str().c_str());
169 }
170 }
171 }
172
173 // Allow lossy statistics in non-debug builds.
174 #ifndef NDEBUG
175 #define STATS_LOCK() MutexLock mu(Thread::Current(), stats_lock_)
176 #else
177 #define STATS_LOCK()
178 #endif
179
180 void TypeDoesntNeedAccessCheck() REQUIRES(!stats_lock_) {
181 STATS_LOCK();
182 resolved_types_++;
183 }
184
185 void TypeNeedsAccessCheck() REQUIRES(!stats_lock_) {
186 STATS_LOCK();
187 unresolved_types_++;
188 }
189
190 void ResolvedInstanceField() REQUIRES(!stats_lock_) {
191 STATS_LOCK();
192 resolved_instance_fields_++;
193 }
194
195 void UnresolvedInstanceField() REQUIRES(!stats_lock_) {
196 STATS_LOCK();
197 unresolved_instance_fields_++;
198 }
199
200 void ResolvedLocalStaticField() REQUIRES(!stats_lock_) {
201 STATS_LOCK();
202 resolved_local_static_fields_++;
203 }
204
205 void ResolvedStaticField() REQUIRES(!stats_lock_) {
206 STATS_LOCK();
207 resolved_static_fields_++;
208 }
209
210 void UnresolvedStaticField() REQUIRES(!stats_lock_) {
211 STATS_LOCK();
212 unresolved_static_fields_++;
213 }
214
215 // Indicate that type information from the verifier led to devirtualization.
216 void PreciseTypeDevirtualization() REQUIRES(!stats_lock_) {
217 STATS_LOCK();
218 type_based_devirtualization_++;
219 }
220
221 // A check-cast could be eliminated due to verifier type analysis.
222 void SafeCast() REQUIRES(!stats_lock_) {
223 STATS_LOCK();
224 safe_casts_++;
225 }
226
227 // A check-cast couldn't be eliminated due to verifier type analysis.
228 void NotASafeCast() REQUIRES(!stats_lock_) {
229 STATS_LOCK();
230 not_safe_casts_++;
231 }
232
233 private:
234 Mutex stats_lock_;
235
236 size_t resolved_types_;
237 size_t unresolved_types_;
238
239 size_t resolved_instance_fields_;
240 size_t unresolved_instance_fields_;
241
242 size_t resolved_local_static_fields_;
243 size_t resolved_static_fields_;
244 size_t unresolved_static_fields_;
245 // Type based devirtualization for invoke interface and virtual.
246 size_t type_based_devirtualization_;
247
248 size_t resolved_methods_[kMaxInvokeType + 1];
249 size_t unresolved_methods_[kMaxInvokeType + 1];
250 size_t virtual_made_direct_[kMaxInvokeType + 1];
251 size_t direct_calls_to_boot_[kMaxInvokeType + 1];
252 size_t direct_methods_to_boot_[kMaxInvokeType + 1];
253
254 size_t safe_casts_;
255 size_t not_safe_casts_;
256
257 DISALLOW_COPY_AND_ASSIGN(AOTCompilationStats);
258 };
259
260 CompilerDriver::CompilerDriver(
261 const CompilerOptions* compiler_options,
262 VerificationResults* verification_results,
263 Compiler::Kind compiler_kind,
264 InstructionSet instruction_set,
265 const InstructionSetFeatures* instruction_set_features,
266 std::unordered_set<std::string>* image_classes,
267 std::unordered_set<std::string>* compiled_classes,
268 std::unordered_set<std::string>* compiled_methods,
269 size_t thread_count,
270 int swap_fd,
271 const ProfileCompilationInfo* profile_compilation_info)
272 : compiler_options_(compiler_options),
273 verification_results_(verification_results),
274 compiler_(Compiler::Create(this, compiler_kind)),
275 compiler_kind_(compiler_kind),
276 instruction_set_(
277 instruction_set == InstructionSet::kArm ? InstructionSet::kThumb2 : instruction_set),
278 instruction_set_features_(instruction_set_features),
279 requires_constructor_barrier_lock_("constructor barrier lock"),
280 non_relative_linker_patch_count_(0u),
281 image_classes_(image_classes),
282 classes_to_compile_(compiled_classes),
283 methods_to_compile_(compiled_methods),
284 number_of_soft_verifier_failures_(0),
285 had_hard_verifier_failure_(false),
286 parallel_thread_count_(thread_count),
287 stats_(new AOTCompilationStats),
288 compiler_context_(nullptr),
289 support_boot_image_fixup_(true),
290 compiled_method_storage_(swap_fd),
291 profile_compilation_info_(profile_compilation_info),
292 max_arena_alloc_(0),
293 dex_to_dex_compiler_(this) {
294 DCHECK(compiler_options_ != nullptr);
295
296 compiler_->Init();
297
298 if (GetCompilerOptions().IsBootImage()) {
299 CHECK(image_classes_.get() != nullptr) << "Expected image classes for boot image";
300 }
301
302 compiled_method_storage_.SetDedupeEnabled(compiler_options_->DeduplicateCode());
303 }
304
305 CompilerDriver::~CompilerDriver() {
306 compiled_methods_.Visit([this](const DexFileReference& ref ATTRIBUTE_UNUSED,
307 CompiledMethod* method) {
308 if (method != nullptr) {
309 CompiledMethod::ReleaseSwapAllocatedCompiledMethod(this, method);
310 }
311 });
312 compiler_->UnInit();
313 }
314
315
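// Builds a trampoline for the given entrypoint, picking the 64-bit or 32-bit generator based on
// the driver's instruction set. Expanded by the Create*Trampoline() methods below.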
316 #define CREATE_TRAMPOLINE(type, abi, offset) \
317 if (Is64BitInstructionSet(instruction_set_)) { \
318 return CreateTrampoline64(instruction_set_, abi, \
319 type ## _ENTRYPOINT_OFFSET(PointerSize::k64, offset)); \
320 } else { \
321 return CreateTrampoline32(instruction_set_, abi, \
322 type ## _ENTRYPOINT_OFFSET(PointerSize::k32, offset)); \
323 }
324
325 std::unique_ptr<const std::vector<uint8_t>> CompilerDriver::CreateJniDlsymLookup() const {
326 CREATE_TRAMPOLINE(JNI, kJniAbi, pDlsymLookup)
327 }
328
329 std::unique_ptr<const std::vector<uint8_t>> CompilerDriver::CreateQuickGenericJniTrampoline()
330 const {
331 CREATE_TRAMPOLINE(QUICK, kQuickAbi, pQuickGenericJniTrampoline)
332 }
333
334 std::unique_ptr<const std::vector<uint8_t>> CompilerDriver::CreateQuickImtConflictTrampoline()
335 const {
336 CREATE_TRAMPOLINE(QUICK, kQuickAbi, pQuickImtConflictTrampoline)
337 }
338
339 std::unique_ptr<const std::vector<uint8_t>> CompilerDriver::CreateQuickResolutionTrampoline()
340 const {
341 CREATE_TRAMPOLINE(QUICK, kQuickAbi, pQuickResolutionTrampoline)
342 }
343
344 std::unique_ptr<const std::vector<uint8_t>> CompilerDriver::CreateQuickToInterpreterBridge()
345 const {
346 CREATE_TRAMPOLINE(QUICK, kQuickAbi, pQuickToInterpreterBridge)
347 }
348 #undef CREATE_TRAMPOLINE
349
350 void CompilerDriver::CompileAll(jobject class_loader,
351 const std::vector<const DexFile*>& dex_files,
352 TimingLogger* timings) {
353 DCHECK(!Runtime::Current()->IsStarted());
354
355 InitializeThreadPools();
356
357 VLOG(compiler) << "Before precompile " << GetMemoryUsageString(false);
358 // Precompile:
359 // 1) Load image classes
360 // 2) Resolve all classes
361 // 3) Attempt to verify all classes
362 // 4) Attempt to initialize image classes, and trivially initialized classes
363 PreCompile(class_loader, dex_files, timings);
364 if (GetCompilerOptions().IsBootImage()) {
365 // We don't need to set up the intrinsics for non-boot-image compilation, as
366 // those compilations will pick up a boot image that already has the ArtMethods
367 // set up with the intrinsics flag.
368 InitializeIntrinsics();
369 }
370 // Compile:
371 // 1) Compile all classes and methods enabled for compilation. May fall back to dex-to-dex
372 // compilation.
373 if (GetCompilerOptions().IsAnyCompilationEnabled()) {
374 Compile(class_loader, dex_files, timings);
375 }
376 if (GetCompilerOptions().GetDumpStats()) {
377 stats_->Dump();
378 }
379
380 FreeThreadPools();
381 }
382
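// Decides how far the DEX-to-DEX compiler may go for methods of the given class: quickening is
// skipped for dex files mapped read-only from the APK and for debuggable compiles, and full
// optimization is only allowed once the class has been successfully verified.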
383 static optimizer::DexToDexCompiler::CompilationLevel GetDexToDexCompilationLevel(
384 Thread* self, const CompilerDriver& driver, Handle<mirror::ClassLoader> class_loader,
385 const DexFile& dex_file, const DexFile::ClassDef& class_def)
386 REQUIRES_SHARED(Locks::mutator_lock_) {
387 // When the dex file is uncompressed in the APK, we do not generate a copy in the .vdex
388 // file. As a result, dex2oat will map the dex file read-only, and we only need to check
389 // that to know if we can do quickening.
390 if (dex_file.GetContainer() != nullptr && dex_file.GetContainer()->IsReadOnly()) {
391 return optimizer::DexToDexCompiler::CompilationLevel::kDontDexToDexCompile;
392 }
393 auto* const runtime = Runtime::Current();
394 DCHECK(driver.GetCompilerOptions().IsQuickeningCompilationEnabled());
395 const char* descriptor = dex_file.GetClassDescriptor(class_def);
396 ClassLinker* class_linker = runtime->GetClassLinker();
397 mirror::Class* klass = class_linker->FindClass(self, descriptor, class_loader);
398 if (klass == nullptr) {
399 CHECK(self->IsExceptionPending());
400 self->ClearException();
401 return optimizer::DexToDexCompiler::CompilationLevel::kDontDexToDexCompile;
402 }
403 // DexToDex at the kOptimize level may introduce quickened opcodes, which replace symbolic
404 // references with actual offsets. We cannot re-verify such instructions.
405 //
406 // We store the verification information in the class status in the oat file, which the linker
407 // can validate (checksums) and use to skip load-time verification. It is thus safe to
408 // optimize when a class has been fully verified before.
409 optimizer::DexToDexCompiler::CompilationLevel max_level =
410 optimizer::DexToDexCompiler::CompilationLevel::kOptimize;
411 if (driver.GetCompilerOptions().GetDebuggable()) {
412 // We are debuggable so definitions of classes might be changed. We don't want to do any
413 // optimizations that could break that.
414 max_level = optimizer::DexToDexCompiler::CompilationLevel::kDontDexToDexCompile;
415 }
416 if (klass->IsVerified()) {
417 // Class is verified so we can enable DEX-to-DEX compilation for performance.
418 return max_level;
419 } else {
420 // Class verification has failed: do not run DEX-to-DEX optimizations.
421 return optimizer::DexToDexCompiler::CompilationLevel::kDontDexToDexCompile;
422 }
423 }
424
425 static optimizer::DexToDexCompiler::CompilationLevel GetDexToDexCompilationLevel(
426 Thread* self,
427 const CompilerDriver& driver,
428 jobject jclass_loader,
429 const DexFile& dex_file,
430 const DexFile::ClassDef& class_def) {
431 ScopedObjectAccess soa(self);
432 StackHandleScope<1> hs(soa.Self());
433 Handle<mirror::ClassLoader> class_loader(
434 hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader)));
435 return GetDexToDexCompilationLevel(self, driver, class_loader, dex_file, class_def);
436 }
437
438 // Does the runtime for the InstructionSet provide an implementation returned by
439 // GetQuickGenericJniStub allowing down calls that aren't compiled using a JNI compiler?
440 static bool InstructionSetHasGenericJniStub(InstructionSet isa) {
441 switch (isa) {
442 case InstructionSet::kArm:
443 case InstructionSet::kArm64:
444 case InstructionSet::kThumb2:
445 case InstructionSet::kMips:
446 case InstructionSet::kMips64:
447 case InstructionSet::kX86:
448 case InstructionSet::kX86_64: return true;
449 default: return false;
450 }
451 }
452
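// Shared wrapper around a concrete compile function: it times the compilation, registers the
// resulting CompiledMethod with the driver (counting non-PC-relative linker patches), and aborts
// if an exception is left pending. CompileMethodQuick and CompileMethodDex2Dex plug into this.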
453 template <typename CompileFn>
454 static void CompileMethodHarness(
455 Thread* self,
456 CompilerDriver* driver,
457 const DexFile::CodeItem* code_item,
458 uint32_t access_flags,
459 InvokeType invoke_type,
460 uint16_t class_def_idx,
461 uint32_t method_idx,
462 Handle<mirror::ClassLoader> class_loader,
463 const DexFile& dex_file,
464 optimizer::DexToDexCompiler::CompilationLevel dex_to_dex_compilation_level,
465 bool compilation_enabled,
466 Handle<mirror::DexCache> dex_cache,
467 CompileFn compile_fn) {
468 DCHECK(driver != nullptr);
469 CompiledMethod* compiled_method;
470 uint64_t start_ns = kTimeCompileMethod ? NanoTime() : 0;
471 MethodReference method_ref(&dex_file, method_idx);
472
473 compiled_method = compile_fn(self,
474 driver,
475 code_item,
476 access_flags,
477 invoke_type,
478 class_def_idx,
479 method_idx,
480 class_loader,
481 dex_file,
482 dex_to_dex_compilation_level,
483 compilation_enabled,
484 dex_cache);
485
486 if (kTimeCompileMethod) {
487 uint64_t duration_ns = NanoTime() - start_ns;
488 if (duration_ns > MsToNs(driver->GetCompiler()->GetMaximumCompilationTimeBeforeWarning())) {
489 LOG(WARNING) << "Compilation of " << dex_file.PrettyMethod(method_idx)
490 << " took " << PrettyDuration(duration_ns);
491 }
492 }
493
494 if (compiled_method != nullptr) {
495 // Count non-relative linker patches.
496 size_t non_relative_linker_patch_count = 0u;
497 for (const linker::LinkerPatch& patch : compiled_method->GetPatches()) {
498 if (!patch.IsPcRelative()) {
499 ++non_relative_linker_patch_count;
500 }
501 }
502 bool compile_pic = driver->GetCompilerOptions().GetCompilePic(); // Off by default
503 // When compiling with PIC, there should be zero non-relative linker patches
504 CHECK(!compile_pic || non_relative_linker_patch_count == 0u);
505
506 driver->AddCompiledMethod(method_ref, compiled_method, non_relative_linker_patch_count);
507 }
508
509 if (self->IsExceptionPending()) {
510 ScopedObjectAccess soa(self);
511 LOG(FATAL) << "Unexpected exception compiling: " << dex_file.PrettyMethod(method_idx) << "\n"
512 << self->GetException()->Dump();
513 }
514 }
515
516 static void CompileMethodDex2Dex(
517 Thread* self,
518 CompilerDriver* driver,
519 const DexFile::CodeItem* code_item,
520 uint32_t access_flags,
521 InvokeType invoke_type,
522 uint16_t class_def_idx,
523 uint32_t method_idx,
524 Handle<mirror::ClassLoader> class_loader,
525 const DexFile& dex_file,
526 optimizer::DexToDexCompiler::CompilationLevel dex_to_dex_compilation_level,
527 bool compilation_enabled,
528 Handle<mirror::DexCache> dex_cache) {
529 auto dex_2_dex_fn = [](Thread* self ATTRIBUTE_UNUSED,
530 CompilerDriver* driver,
531 const DexFile::CodeItem* code_item,
532 uint32_t access_flags,
533 InvokeType invoke_type,
534 uint16_t class_def_idx,
535 uint32_t method_idx,
536 Handle<mirror::ClassLoader> class_loader,
537 const DexFile& dex_file,
538 optimizer::DexToDexCompiler::CompilationLevel dex_to_dex_compilation_level,
539 bool compilation_enabled ATTRIBUTE_UNUSED,
540 Handle<mirror::DexCache> dex_cache ATTRIBUTE_UNUSED) -> CompiledMethod* {
541 DCHECK(driver != nullptr);
542 MethodReference method_ref(&dex_file, method_idx);
543
544 optimizer::DexToDexCompiler* const compiler = &driver->GetDexToDexCompiler();
545
546 if (compiler->ShouldCompileMethod(method_ref)) {
547 VerificationResults* results = driver->GetVerificationResults();
548 DCHECK(results != nullptr);
549 const VerifiedMethod* verified_method = results->GetVerifiedMethod(method_ref);
550 // Do not optimize if a VerifiedMethod is missing. SafeCast elision,
551 // for example, relies on it.
552 return compiler->CompileMethod(
553 code_item,
554 access_flags,
555 invoke_type,
556 class_def_idx,
557 method_idx,
558 class_loader,
559 dex_file,
560 (verified_method != nullptr)
561 ? dex_to_dex_compilation_level
562 : optimizer::DexToDexCompiler::CompilationLevel::kDontDexToDexCompile);
563 }
564 return nullptr;
565 };
566 CompileMethodHarness(self,
567 driver,
568 code_item,
569 access_flags,
570 invoke_type,
571 class_def_idx,
572 method_idx,
573 class_loader,
574 dex_file,
575 dex_to_dex_compilation_level,
576 compilation_enabled,
577 dex_cache,
578 dex_2_dex_fn);
579 }
580
581 static void CompileMethodQuick(
582 Thread* self,
583 CompilerDriver* driver,
584 const DexFile::CodeItem* code_item,
585 uint32_t access_flags,
586 InvokeType invoke_type,
587 uint16_t class_def_idx,
588 uint32_t method_idx,
589 Handle<mirror::ClassLoader> class_loader,
590 const DexFile& dex_file,
591 optimizer::DexToDexCompiler::CompilationLevel dex_to_dex_compilation_level,
592 bool compilation_enabled,
593 Handle<mirror::DexCache> dex_cache) {
594 auto quick_fn = [](
595 Thread* self,
596 CompilerDriver* driver,
597 const DexFile::CodeItem* code_item,
598 uint32_t access_flags,
599 InvokeType invoke_type,
600 uint16_t class_def_idx,
601 uint32_t method_idx,
602 Handle<mirror::ClassLoader> class_loader,
603 const DexFile& dex_file,
604 optimizer::DexToDexCompiler::CompilationLevel dex_to_dex_compilation_level,
605 bool compilation_enabled,
606 Handle<mirror::DexCache> dex_cache) {
607 DCHECK(driver != nullptr);
608 CompiledMethod* compiled_method = nullptr;
609 MethodReference method_ref(&dex_file, method_idx);
610
611 if ((access_flags & kAccNative) != 0) {
612 // Are we only extracting, and do we have support for generic JNI down calls?
613 if (!driver->GetCompilerOptions().IsJniCompilationEnabled() &&
614 InstructionSetHasGenericJniStub(driver->GetInstructionSet())) {
615 // Leaving this empty will trigger the generic JNI version
616 } else {
617 // Query any JNI optimization annotations such as @FastNative or @CriticalNative.
618 access_flags |= annotations::GetNativeMethodAnnotationAccessFlags(
619 dex_file, dex_file.GetClassDef(class_def_idx), method_idx);
620
621 compiled_method = driver->GetCompiler()->JniCompile(
622 access_flags, method_idx, dex_file, dex_cache);
623 CHECK(compiled_method != nullptr);
624 }
625 } else if ((access_flags & kAccAbstract) != 0) {
626 // Abstract methods don't have code.
627 } else {
628 VerificationResults* results = driver->GetVerificationResults();
629 DCHECK(results != nullptr);
630 const VerifiedMethod* verified_method = results->GetVerifiedMethod(method_ref);
631 bool compile = compilation_enabled &&
632 // Basic checks, e.g., not <clinit>.
633 results->IsCandidateForCompilation(method_ref, access_flags) &&
634 // Did not fail to create VerifiedMethod metadata.
635 verified_method != nullptr &&
636 // Do not have failures that should punt to the interpreter.
637 !verified_method->HasRuntimeThrow() &&
638 (verified_method->GetEncounteredVerificationFailures() &
639 (verifier::VERIFY_ERROR_FORCE_INTERPRETER | verifier::VERIFY_ERROR_LOCKING)) == 0 &&
640 // Is eligible for compilation by the methods-to-compile filter.
641 driver->IsMethodToCompile(method_ref) &&
642 driver->ShouldCompileBasedOnProfile(method_ref);
643
644 if (compile) {
645 // NOTE: if compiler declines to compile this method, it will return null.
646 compiled_method = driver->GetCompiler()->Compile(code_item,
647 access_flags,
648 invoke_type,
649 class_def_idx,
650 method_idx,
651 class_loader,
652 dex_file,
653 dex_cache);
654 }
655 if (compiled_method == nullptr &&
656 dex_to_dex_compilation_level !=
657 optimizer::DexToDexCompiler::CompilationLevel::kDontDexToDexCompile) {
658 DCHECK(!Runtime::Current()->UseJitCompilation());
659 // TODO: add a command-line option to disable DEX-to-DEX compilation?
660 driver->GetDexToDexCompiler().MarkForCompilation(self, method_ref);
661 }
662 }
663 return compiled_method;
664 };
665 CompileMethodHarness(self,
666 driver,
667 code_item,
668 access_flags,
669 invoke_type,
670 class_def_idx,
671 method_idx,
672 class_loader,
673 dex_file,
674 dex_to_dex_compilation_level,
675 compilation_enabled,
676 dex_cache,
677 quick_fn);
678 }
679
680 void CompilerDriver::CompileOne(Thread* self, ArtMethod* method, TimingLogger* timings) {
681 DCHECK(!Runtime::Current()->IsStarted());
682 jobject jclass_loader;
683 const DexFile* dex_file;
684 uint16_t class_def_idx;
685 uint32_t method_idx = method->GetDexMethodIndex();
686 uint32_t access_flags = method->GetAccessFlags();
687 InvokeType invoke_type = method->GetInvokeType();
688 StackHandleScope<2> hs(self);
689 Handle<mirror::DexCache> dex_cache(hs.NewHandle(method->GetDexCache()));
690 Handle<mirror::ClassLoader> class_loader(
691 hs.NewHandle(method->GetDeclaringClass()->GetClassLoader()));
692 {
693 ScopedObjectAccessUnchecked soa(self);
694 ScopedLocalRef<jobject> local_class_loader(
695 soa.Env(), soa.AddLocalReference<jobject>(class_loader.Get()));
696 jclass_loader = soa.Env()->NewGlobalRef(local_class_loader.get());
697 // Find the dex_file
698 dex_file = method->GetDexFile();
699 class_def_idx = method->GetClassDefIndex();
700 }
701 const DexFile::CodeItem* code_item = dex_file->GetCodeItem(method->GetCodeItemOffset());
702
703 // Go to native so that we don't block GC during compilation.
704 ScopedThreadSuspension sts(self, kNative);
705
706 std::vector<const DexFile*> dex_files;
707 dex_files.push_back(dex_file);
708
709 InitializeThreadPools();
710
711 PreCompile(jclass_loader, dex_files, timings);
712
713 // Can we run the DEX-to-DEX compiler on this class?
714 optimizer::DexToDexCompiler::CompilationLevel dex_to_dex_compilation_level =
715 GetDexToDexCompilationLevel(self,
716 *this,
717 jclass_loader,
718 *dex_file,
719 dex_file->GetClassDef(class_def_idx));
720
721 CompileMethodQuick(self,
722 this,
723 code_item,
724 access_flags,
725 invoke_type,
726 class_def_idx,
727 method_idx,
728 class_loader,
729 *dex_file,
730 dex_to_dex_compilation_level,
731 true,
732 dex_cache);
733
734 const size_t num_methods = dex_to_dex_compiler_.NumCodeItemsToQuicken(self);
735 if (num_methods != 0) {
736 DCHECK_EQ(num_methods, 1u);
737 CompileMethodDex2Dex(self,
738 this,
739 code_item,
740 access_flags,
741 invoke_type,
742 class_def_idx,
743 method_idx,
744 class_loader,
745 *dex_file,
746 dex_to_dex_compilation_level,
747 true,
748 dex_cache);
749 dex_to_dex_compiler_.ClearState();
750 }
751
752 FreeThreadPools();
753
754 self->GetJniEnv()->DeleteGlobalRef(jclass_loader);
755 }
756
757 void CompilerDriver::Resolve(jobject class_loader,
758 const std::vector<const DexFile*>& dex_files,
759 TimingLogger* timings) {
760 // Resolution allocates classes and needs to run single-threaded to be deterministic.
761 bool force_determinism = GetCompilerOptions().IsForceDeterminism();
762 ThreadPool* resolve_thread_pool = force_determinism
763 ? single_thread_pool_.get()
764 : parallel_thread_pool_.get();
765 size_t resolve_thread_count = force_determinism ? 1U : parallel_thread_count_;
766
767 for (size_t i = 0; i != dex_files.size(); ++i) {
768 const DexFile* dex_file = dex_files[i];
769 CHECK(dex_file != nullptr);
770 ResolveDexFile(class_loader,
771 *dex_file,
772 dex_files,
773 resolve_thread_pool,
774 resolve_thread_count,
775 timings);
776 }
777 }
778
779 // Resolve const-strings in the code. Done to have deterministic allocation behavior. Right now
780 // this is single-threaded for simplicity.
781 // TODO: Collect the relevant string indices in parallel, then allocate them sequentially in a
782 // stable order.
783
784 static void ResolveConstStrings(Handle<mirror::DexCache> dex_cache,
785 const DexFile& dex_file,
786 const DexFile::CodeItem* code_item)
787 REQUIRES_SHARED(Locks::mutator_lock_) {
788 if (code_item == nullptr) {
789 // Abstract or native method.
790 return;
791 }
792
793 ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
794 for (const DexInstructionPcPair& inst : CodeItemInstructionAccessor(dex_file, code_item)) {
795 switch (inst->Opcode()) {
796 case Instruction::CONST_STRING:
797 case Instruction::CONST_STRING_JUMBO: {
798 dex::StringIndex string_index((inst->Opcode() == Instruction::CONST_STRING)
799 ? inst->VRegB_21c()
800 : inst->VRegB_31c());
801 ObjPtr<mirror::String> string = class_linker->ResolveString(string_index, dex_cache);
802 CHECK(string != nullptr) << "Could not allocate a string when forcing determinism";
803 break;
804 }
805
806 default:
807 break;
808 }
809 }
810 }
811
812 static void ResolveConstStrings(CompilerDriver* driver,
813 const std::vector<const DexFile*>& dex_files,
814 TimingLogger* timings) {
815 ScopedObjectAccess soa(Thread::Current());
816 StackHandleScope<1> hs(soa.Self());
817 ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
818 MutableHandle<mirror::DexCache> dex_cache(hs.NewHandle<mirror::DexCache>(nullptr));
819
820 for (const DexFile* dex_file : dex_files) {
821 dex_cache.Assign(class_linker->FindDexCache(soa.Self(), *dex_file));
822 TimingLogger::ScopedTiming t("Resolve const-string Strings", timings);
823
824 size_t class_def_count = dex_file->NumClassDefs();
825 for (size_t class_def_index = 0; class_def_index < class_def_count; ++class_def_index) {
826 const DexFile::ClassDef& class_def = dex_file->GetClassDef(class_def_index);
827
828 const uint8_t* class_data = dex_file->GetClassData(class_def);
829 if (class_data == nullptr) {
830 // empty class, probably a marker interface
831 continue;
832 }
833
834 ClassDataItemIterator it(*dex_file, class_data);
835 it.SkipAllFields();
836
837 bool compilation_enabled = driver->IsClassToCompile(
838 dex_file->StringByTypeIdx(class_def.class_idx_));
839 if (!compilation_enabled) {
840 // Compilation is skipped, do not resolve const-string in code of this class.
841 // TODO: Make sure that inlining honors this.
842 continue;
843 }
844
845 // Direct and virtual methods.
846 int64_t previous_method_idx = -1;
847 while (it.HasNextMethod()) {
848 uint32_t method_idx = it.GetMemberIndex();
849 if (method_idx == previous_method_idx) {
850 // smali can create dex files with two encoded_methods sharing the same method_idx
851 // http://code.google.com/p/smali/issues/detail?id=119
852 it.Next();
853 continue;
854 }
855 previous_method_idx = method_idx;
856 ResolveConstStrings(dex_cache, *dex_file, it.GetMethodCodeItem());
857 it.Next();
858 }
859 DCHECK(!it.HasNext());
860 }
861 }
862 }
863
864 inline void CompilerDriver::CheckThreadPools() {
865 DCHECK(parallel_thread_pool_ != nullptr);
866 DCHECK(single_thread_pool_ != nullptr);
867 }
868
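// Debug-build check used by PreCompile(): after the verification phase, every class defined in
// the given dex files must be erroneous, verified, or marked to be verified at runtime.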
869 static void EnsureVerifiedOrVerifyAtRuntime(jobject jclass_loader,
870 const std::vector<const DexFile*>& dex_files) {
871 ScopedObjectAccess soa(Thread::Current());
872 StackHandleScope<2> hs(soa.Self());
873 Handle<mirror::ClassLoader> class_loader(
874 hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader)));
875 MutableHandle<mirror::Class> cls(hs.NewHandle<mirror::Class>(nullptr));
876 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
877
878 for (const DexFile* dex_file : dex_files) {
879 for (uint32_t i = 0; i < dex_file->NumClassDefs(); ++i) {
880 const DexFile::ClassDef& class_def = dex_file->GetClassDef(i);
881 const char* descriptor = dex_file->GetClassDescriptor(class_def);
882 cls.Assign(class_linker->FindClass(soa.Self(), descriptor, class_loader));
883 if (cls == nullptr) {
884 soa.Self()->ClearException();
885 } else if (&cls->GetDexFile() == dex_file) {
886 DCHECK(cls->IsErroneous() || cls->IsVerified() || cls->ShouldVerifyAtRuntime())
887 << cls->PrettyClass()
888 << " " << cls->GetStatus();
889 }
890 }
891 }
892 }
893
894 void CompilerDriver::PreCompile(jobject class_loader,
895 const std::vector<const DexFile*>& dex_files,
896 TimingLogger* timings) {
897 CheckThreadPools();
898
899 LoadImageClasses(timings);
900 VLOG(compiler) << "LoadImageClasses: " << GetMemoryUsageString(false);
901
902 if (compiler_options_->IsAnyCompilationEnabled()) {
903 // Avoid adding the dex files in the case where we aren't going to add compiled methods.
904 // This reduces RAM usage for this case.
905 for (const DexFile* dex_file : dex_files) {
906 // It may already be inserted if the caller is CompileOne. This happens for gtests.
907 if (!compiled_methods_.HaveDexFile(dex_file)) {
908 compiled_methods_.AddDexFile(dex_file);
909 }
910 }
911 // Resolve eagerly to prepare for compilation.
912 Resolve(class_loader, dex_files, timings);
913 VLOG(compiler) << "Resolve: " << GetMemoryUsageString(false);
914 }
915
916 if (compiler_options_->AssumeClassesAreVerified()) {
917 VLOG(compiler) << "Verify none mode specified, skipping verification.";
918 SetVerified(class_loader, dex_files, timings);
919 }
920
921 if (!compiler_options_->IsVerificationEnabled()) {
922 return;
923 }
924
925 if (GetCompilerOptions().IsForceDeterminism() && GetCompilerOptions().IsBootImage()) {
926 // Resolve strings from const-string. Do this now to have a deterministic image.
927 ResolveConstStrings(this, dex_files, timings);
928 VLOG(compiler) << "Resolve const-strings: " << GetMemoryUsageString(false);
929 }
930
931 Verify(class_loader, dex_files, timings);
932 VLOG(compiler) << "Verify: " << GetMemoryUsageString(false);
933
934 if (had_hard_verifier_failure_ && GetCompilerOptions().AbortOnHardVerifierFailure()) {
935 // Avoid dumping threads. Even if we shut down the thread pools, there will still be three
936 // instances of this thread's stack.
937 LOG(FATAL_WITHOUT_ABORT) << "Had a hard failure verifying all classes, and was asked to abort "
938 << "in such situations. Please check the log.";
939 _exit(1);
940 } else if (number_of_soft_verifier_failures_ > 0 &&
941 GetCompilerOptions().AbortOnSoftVerifierFailure()) {
942 LOG(FATAL_WITHOUT_ABORT) << "Had " << number_of_soft_verifier_failures_ << " soft failure(s) "
943 << "verifying all classes, and was asked to abort in such situations. "
944 << "Please check the log.";
945 _exit(1);
946 }
947
948 if (compiler_options_->IsAnyCompilationEnabled()) {
949 if (kIsDebugBuild) {
950 EnsureVerifiedOrVerifyAtRuntime(class_loader, dex_files);
951 }
952 InitializeClasses(class_loader, dex_files, timings);
953 VLOG(compiler) << "InitializeClasses: " << GetMemoryUsageString(false);
954 }
955
956 UpdateImageClasses(timings);
957 VLOG(compiler) << "UpdateImageClasses: " << GetMemoryUsageString(false);
958 }
959
960 bool CompilerDriver::IsImageClass(const char* descriptor) const {
961 if (image_classes_ != nullptr) {
962 // If we have a set of image classes, use those.
963 return image_classes_->find(descriptor) != image_classes_->end();
964 }
965 // No set of image classes, assume we include all the classes.
966 // NOTE: Currently only reachable from InitImageMethodVisitor for the app image case.
967 return !GetCompilerOptions().IsBootImage();
968 }
969
970 bool CompilerDriver::IsClassToCompile(const char* descriptor) const {
971 if (classes_to_compile_ == nullptr) {
972 return true;
973 }
974 return classes_to_compile_->find(descriptor) != classes_to_compile_->end();
975 }
976
977 bool CompilerDriver::IsMethodToCompile(const MethodReference& method_ref) const {
978 if (methods_to_compile_ == nullptr) {
979 return true;
980 }
981
982 std::string tmp = method_ref.PrettyMethod();
983 return methods_to_compile_->find(tmp.c_str()) != methods_to_compile_->end();
984 }
985
986 bool CompilerDriver::ShouldCompileBasedOnProfile(const MethodReference& method_ref) const {
987 // Profile compilation info may be null if no profile is passed.
988 if (!CompilerFilter::DependsOnProfile(compiler_options_->GetCompilerFilter())) {
989 // Use the compiler filter instead of the presence of profile_compilation_info_ since
990 // we may want to have full speed compilation along with profile based layout optimizations.
991 return true;
992 }
993 // If we are using a profile filter but do not have a profile compilation info, compile nothing.
994 if (profile_compilation_info_ == nullptr) {
995 return false;
996 }
997 // Compile only hot methods, it is the profile saver's job to decide what startup methods to mark
998 // as hot.
999 bool result = profile_compilation_info_->GetMethodHotness(method_ref).IsHot();
1000
1001 if (kDebugProfileGuidedCompilation) {
1002 LOG(INFO) << "[ProfileGuidedCompilation] "
1003 << (result ? "Compiled" : "Skipped") << " method:" << method_ref.PrettyMethod(true);
1004 }
1005 return result;
1006 }
1007
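// Collects all loaded classes and then scans their methods' catch handlers for exception types
// that are not yet resolved, so that LoadImageClasses() can resolve them eagerly.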
1008 class ResolveCatchBlockExceptionsClassVisitor : public ClassVisitor {
1009 public:
1010 ResolveCatchBlockExceptionsClassVisitor() : classes_() {}
1011
1012 virtual bool operator()(ObjPtr<mirror::Class> c) OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
1013 classes_.push_back(c);
1014 return true;
1015 }
1016
1017 void FindExceptionTypesToResolve(
1018 std::set<std::pair<dex::TypeIndex, const DexFile*>>* exceptions_to_resolve)
1019 REQUIRES_SHARED(Locks::mutator_lock_) {
1020 const auto pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
1021 for (ObjPtr<mirror::Class> klass : classes_) {
1022 for (ArtMethod& method : klass->GetMethods(pointer_size)) {
1023 FindExceptionTypesToResolveForMethod(&method, exceptions_to_resolve);
1024 }
1025 }
1026 }
1027
1028 private:
1029 void FindExceptionTypesToResolveForMethod(
1030 ArtMethod* method,
1031 std::set<std::pair<dex::TypeIndex, const DexFile*>>* exceptions_to_resolve)
1032 REQUIRES_SHARED(Locks::mutator_lock_) {
1033 if (method->GetCodeItem() == nullptr) {
1034 return; // native or abstract method
1035 }
1036 CodeItemDataAccessor accessor(method->DexInstructionData());
1037 if (accessor.TriesSize() == 0) {
1038 return; // nothing to process
1039 }
1040 const uint8_t* encoded_catch_handler_list = accessor.GetCatchHandlerData();
1041 size_t num_encoded_catch_handlers = DecodeUnsignedLeb128(&encoded_catch_handler_list);
1042 for (size_t i = 0; i < num_encoded_catch_handlers; i++) {
1043 int32_t encoded_catch_handler_size = DecodeSignedLeb128(&encoded_catch_handler_list);
1044 bool has_catch_all = false;
1045 if (encoded_catch_handler_size <= 0) {
1046 encoded_catch_handler_size = -encoded_catch_handler_size;
1047 has_catch_all = true;
1048 }
1049 for (int32_t j = 0; j < encoded_catch_handler_size; j++) {
1050 dex::TypeIndex encoded_catch_handler_handlers_type_idx =
1051 dex::TypeIndex(DecodeUnsignedLeb128(&encoded_catch_handler_list));
1052 // Add to set of types to resolve if not already in the dex cache resolved types
1053 if (!method->IsResolvedTypeIdx(encoded_catch_handler_handlers_type_idx)) {
1054 exceptions_to_resolve->emplace(encoded_catch_handler_handlers_type_idx,
1055 method->GetDexFile());
1056 }
1057 // ignore address associated with catch handler
1058 DecodeUnsignedLeb128(&encoded_catch_handler_list);
1059 }
1060 if (has_catch_all) {
1061 // ignore catch all address
1062 DecodeUnsignedLeb128(&encoded_catch_handler_list);
1063 }
1064 }
1065 }
1066
1067 std::vector<ObjPtr<mirror::Class>> classes_;
1068 };
1069
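// Records the descriptor of every visited class into the given set of image classes.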
1070 class RecordImageClassesVisitor : public ClassVisitor {
1071 public:
1072 explicit RecordImageClassesVisitor(std::unordered_set<std::string>* image_classes)
1073 : image_classes_(image_classes) {}
1074
1075 bool operator()(ObjPtr<mirror::Class> klass) OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
1076 std::string temp;
1077 image_classes_->insert(klass->GetDescriptor(&temp));
1078 return true;
1079 }
1080
1081 private:
1082 std::unordered_set<std::string>* const image_classes_;
1083 };
1084
1085 // Make a list of descriptors for classes to include in the image
1086 void CompilerDriver::LoadImageClasses(TimingLogger* timings) {
1087 CHECK(timings != nullptr);
1088 if (!GetCompilerOptions().IsBootImage()) {
1089 return;
1090 }
1091
1092 TimingLogger::ScopedTiming t("LoadImageClasses", timings);
1093 // Make a first pass to load all classes explicitly listed in the file.
1094 Thread* self = Thread::Current();
1095 ScopedObjectAccess soa(self);
1096 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
1097 CHECK(image_classes_.get() != nullptr);
1098 for (auto it = image_classes_->begin(), end = image_classes_->end(); it != end;) {
1099 const std::string& descriptor(*it);
1100 StackHandleScope<1> hs(self);
1101 Handle<mirror::Class> klass(
1102 hs.NewHandle(class_linker->FindSystemClass(self, descriptor.c_str())));
1103 if (klass == nullptr) {
1104 VLOG(compiler) << "Failed to find class " << descriptor;
1105 image_classes_->erase(it++);
1106 self->ClearException();
1107 } else {
1108 ++it;
1109 }
1110 }
1111
1112 // Resolve exception classes referenced by the loaded classes. The catch logic assumes
1113 // exceptions are resolved by the verifier when there is a catch block in a method of interest.
1114 // Do this here so that exception classes appear to have been specified as image classes.
1115 std::set<std::pair<dex::TypeIndex, const DexFile*>> unresolved_exception_types;
1116 StackHandleScope<1> hs(self);
1117 Handle<mirror::Class> java_lang_Throwable(
1118 hs.NewHandle(class_linker->FindSystemClass(self, "Ljava/lang/Throwable;")));
1119 do {
1120 unresolved_exception_types.clear();
1121 {
1122 // Thread suspension is not allowed while ResolveCatchBlockExceptionsClassVisitor
1123 // is using a std::vector<ObjPtr<mirror::Class>>.
1124 ScopedAssertNoThreadSuspension ants(__FUNCTION__);
1125 ResolveCatchBlockExceptionsClassVisitor visitor;
1126 class_linker->VisitClasses(&visitor);
1127 visitor.FindExceptionTypesToResolve(&unresolved_exception_types);
1128 }
1129 for (const auto& exception_type : unresolved_exception_types) {
1130 dex::TypeIndex exception_type_idx = exception_type.first;
1131 const DexFile* dex_file = exception_type.second;
1132 StackHandleScope<1> hs2(self);
1133 Handle<mirror::DexCache> dex_cache(hs2.NewHandle(class_linker->RegisterDexFile(*dex_file,
1134 nullptr)));
1135 ObjPtr<mirror::Class> klass =
1136 (dex_cache != nullptr)
1137 ? class_linker->ResolveType(exception_type_idx,
1138 dex_cache,
1139 ScopedNullHandle<mirror::ClassLoader>())
1140 : nullptr;
1141 if (klass == nullptr) {
1142 const DexFile::TypeId& type_id = dex_file->GetTypeId(exception_type_idx);
1143 const char* descriptor = dex_file->GetTypeDescriptor(type_id);
1144 LOG(FATAL) << "Failed to resolve class " << descriptor;
1145 }
1146 DCHECK(java_lang_Throwable->IsAssignableFrom(klass));
1147 }
1148 // Resolving exceptions may load classes that reference more exceptions; iterate until no
1149 // more are found
1150 } while (!unresolved_exception_types.empty());
1151
1152 // We walk the roots looking for classes so that we'll pick up the
1153 // above classes plus any classes they depend on, such as super
1154 // classes, interfaces, and the required ClassLinker roots.
1155 RecordImageClassesVisitor visitor(image_classes_.get());
1156 class_linker->VisitClasses(&visitor);
1157
1158 CHECK_NE(image_classes_->size(), 0U);
1159 }
1160
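// Adds klass and its super classes to the image class set, together with their direct
// interfaces, the declaring classes of their virtual methods, and array component types,
// stopping early once a descriptor is already present or java.lang.Object is reached.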
1161 static void MaybeAddToImageClasses(Thread* self,
1162 ObjPtr<mirror::Class> klass,
1163 std::unordered_set<std::string>* image_classes)
1164 REQUIRES_SHARED(Locks::mutator_lock_) {
1165 DCHECK_EQ(self, Thread::Current());
1166 StackHandleScope<1> hs(self);
1167 std::string temp;
1168 const PointerSize pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
1169 while (!klass->IsObjectClass()) {
1170 const char* descriptor = klass->GetDescriptor(&temp);
1171 std::pair<std::unordered_set<std::string>::iterator, bool> result =
1172 image_classes->insert(descriptor);
1173 if (!result.second) { // Previously inserted.
1174 break;
1175 }
1176 VLOG(compiler) << "Adding " << descriptor << " to image classes";
1177 for (size_t i = 0, num_interfaces = klass->NumDirectInterfaces(); i != num_interfaces; ++i) {
1178 ObjPtr<mirror::Class> interface = mirror::Class::GetDirectInterface(self, klass, i);
1179 DCHECK(interface != nullptr);
1180 MaybeAddToImageClasses(self, interface, image_classes);
1181 }
1182 for (auto& m : klass->GetVirtualMethods(pointer_size)) {
1183 MaybeAddToImageClasses(self, m.GetDeclaringClass(), image_classes);
1184 }
1185 if (klass->IsArrayClass()) {
1186 MaybeAddToImageClasses(self, klass->GetComponentType(), image_classes);
1187 }
1188 klass.Assign(klass->GetSuperClass());
1189 }
1190 }
1191
1192 // Keeps all the data for the update together. Also doubles as the reference visitor.
1193 // Note: we can use object pointers because we suspend all threads.
1194 class ClinitImageUpdate {
1195 public:
1196 static ClinitImageUpdate* Create(VariableSizedHandleScope& hs,
1197 std::unordered_set<std::string>* image_class_descriptors,
1198 Thread* self,
1199 ClassLinker* linker) {
1200 std::unique_ptr<ClinitImageUpdate> res(new ClinitImageUpdate(hs,
1201 image_class_descriptors,
1202 self,
1203 linker));
1204 return res.release();
1205 }
1206
1207 ~ClinitImageUpdate() {
1208 // Allow others to suspend again.
1209 self_->EndAssertNoThreadSuspension(old_cause_);
1210 }
1211
1212 // Visitor for VisitReferences.
1213 void operator()(ObjPtr<mirror::Object> object,
1214 MemberOffset field_offset,
1215 bool /* is_static */) const
1216 REQUIRES_SHARED(Locks::mutator_lock_) {
1217 mirror::Object* ref = object->GetFieldObject<mirror::Object>(field_offset);
1218 if (ref != nullptr) {
1219 VisitClinitClassesObject(ref);
1220 }
1221 }
1222
1223 // java.lang.ref.Reference visitor for VisitReferences.
1224 void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
1225 ObjPtr<mirror::Reference> ref ATTRIBUTE_UNUSED) const {}
1226
1227 // Ignore class native roots.
1228 void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
1229 const {}
1230 void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}
1231
1232 void Walk() REQUIRES_SHARED(Locks::mutator_lock_) {
1233 // Use the initial classes as roots for a search.
1234 for (Handle<mirror::Class> klass_root : image_classes_) {
1235 VisitClinitClassesObject(klass_root.Get());
1236 }
1237 Thread* self = Thread::Current();
1238 ScopedAssertNoThreadSuspension ants(__FUNCTION__);
1239 for (Handle<mirror::Class> h_klass : to_insert_) {
1240 MaybeAddToImageClasses(self, h_klass.Get(), image_class_descriptors_);
1241 }
1242 }
1243
1244 private:
1245 class FindImageClassesVisitor : public ClassVisitor {
1246 public:
1247 explicit FindImageClassesVisitor(VariableSizedHandleScope& hs,
1248 ClinitImageUpdate* data)
1249 : data_(data),
1250 hs_(hs) {}
1251
1252 bool operator()(ObjPtr<mirror::Class> klass) OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
1253 std::string temp;
1254 const char* name = klass->GetDescriptor(&temp);
1255 if (data_->image_class_descriptors_->find(name) != data_->image_class_descriptors_->end()) {
1256 data_->image_classes_.push_back(hs_.NewHandle(klass));
1257 } else {
1258 // Check whether it is initialized and has a clinit. They must be kept, too.
1259 if (klass->IsInitialized() && klass->FindClassInitializer(
1260 Runtime::Current()->GetClassLinker()->GetImagePointerSize()) != nullptr) {
1261 data_->image_classes_.push_back(hs_.NewHandle(klass));
1262 }
1263 }
1264 return true;
1265 }
1266
1267 private:
1268 ClinitImageUpdate* const data_;
1269 VariableSizedHandleScope& hs_;
1270 };
1271
1272 ClinitImageUpdate(VariableSizedHandleScope& hs,
1273 std::unordered_set<std::string>* image_class_descriptors,
1274 Thread* self,
1275 ClassLinker* linker) REQUIRES_SHARED(Locks::mutator_lock_)
1276 : hs_(hs),
1277 image_class_descriptors_(image_class_descriptors),
1278 self_(self) {
1279 CHECK(linker != nullptr);
1280 CHECK(image_class_descriptors != nullptr);
1281
1282 // Make sure nobody interferes with us.
1283 old_cause_ = self->StartAssertNoThreadSuspension("Boot image closure");
1284
1285 // Find all the already-marked classes.
1286 WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
1287 FindImageClassesVisitor visitor(hs_, this);
1288 linker->VisitClasses(&visitor);
1289 }
1290
1291 void VisitClinitClassesObject(mirror::Object* object) const
1292 REQUIRES_SHARED(Locks::mutator_lock_) {
1293 DCHECK(object != nullptr);
1294 if (marked_objects_.find(object) != marked_objects_.end()) {
1295 // Already processed.
1296 return;
1297 }
1298
1299 // Mark it.
1300 marked_objects_.insert(object);
1301
1302 if (object->IsClass()) {
1303 // Add to the TODO list since MaybeAddToImageClasses may cause thread suspension. Thread
1304 // suspension is not safe to do in VisitObjects or VisitReferences.
1305 to_insert_.push_back(hs_.NewHandle(object->AsClass()));
1306 } else {
1307 // Else visit the object's class.
1308 VisitClinitClassesObject(object->GetClass());
1309 }
1310
1311 // If it is not a DexCache, visit all references.
1312 if (!object->IsDexCache()) {
1313 object->VisitReferences(*this, *this);
1314 }
1315 }
1316
1317 VariableSizedHandleScope& hs_;
1318 mutable std::vector<Handle<mirror::Class>> to_insert_;
1319 mutable std::unordered_set<mirror::Object*> marked_objects_;
1320 std::unordered_set<std::string>* const image_class_descriptors_;
1321 std::vector<Handle<mirror::Class>> image_classes_;
1322 Thread* const self_;
1323 const char* old_cause_;
1324
1325 DISALLOW_COPY_AND_ASSIGN(ClinitImageUpdate);
1326 };
1327
1328 void CompilerDriver::UpdateImageClasses(TimingLogger* timings) {
1329 if (GetCompilerOptions().IsBootImage()) {
1330 TimingLogger::ScopedTiming t("UpdateImageClasses", timings);
1331
1332 Runtime* runtime = Runtime::Current();
1333
1334 // Suspend all threads.
1335 ScopedSuspendAll ssa(__FUNCTION__);
1336
1337 VariableSizedHandleScope hs(Thread::Current());
1338 std::string error_msg;
1339 std::unique_ptr<ClinitImageUpdate> update(ClinitImageUpdate::Create(hs,
1340 image_classes_.get(),
1341 Thread::Current(),
1342 runtime->GetClassLinker()));
1343
1344 // Do the marking.
1345 update->Walk();
1346 }
1347 }
1348
1349 bool CompilerDriver::CanAssumeClassIsLoaded(mirror::Class* klass) {
1350 Runtime* runtime = Runtime::Current();
1351 if (!runtime->IsAotCompiler()) {
1352 DCHECK(runtime->UseJitCompilation());
1353 // Having the klass reference here implies that the klass is already loaded.
1354 return true;
1355 }
1356 if (!GetCompilerOptions().IsBootImage()) {
1357 // Assume loaded only if klass is in the boot image. App classes cannot be assumed
1358 // loaded because we don't even know what class loader will be used to load them.
1359 bool class_in_image = runtime->GetHeap()->FindSpaceFromObject(klass, false)->IsImageSpace();
1360 return class_in_image;
1361 }
1362 std::string temp;
1363 const char* descriptor = klass->GetDescriptor(&temp);
1364 return IsImageClass(descriptor);
1365 }
1366
1367 bool CompilerDriver::CanAccessTypeWithoutChecks(ObjPtr<mirror::Class> referrer_class,
1368 ObjPtr<mirror::Class> resolved_class) {
1369 if (resolved_class == nullptr) {
1370 stats_->TypeNeedsAccessCheck();
1371 return false; // Unknown class needs access checks.
1372 }
1373 bool is_accessible = resolved_class->IsPublic(); // Public classes are always accessible.
1374 if (!is_accessible) {
1375 if (referrer_class == nullptr) {
1376 stats_->TypeNeedsAccessCheck();
1377 return false; // Incomplete referrer knowledge needs access check.
1378 }
1379 // Perform the access check; this returns true if access is ok, or false if we're going to
1380 // have to check this at runtime (for example, for class loaders).
1381 is_accessible = referrer_class->CanAccess(resolved_class);
1382 }
1383 if (is_accessible) {
1384 stats_->TypeDoesntNeedAccessCheck();
1385 } else {
1386 stats_->TypeNeedsAccessCheck();
1387 }
1388 return is_accessible;
1389 }
1390
1391 bool CompilerDriver::CanAccessInstantiableTypeWithoutChecks(ObjPtr<mirror::Class> referrer_class,
1392 ObjPtr<mirror::Class> resolved_class,
1393 bool* finalizable) {
1394 if (resolved_class == nullptr) {
1395 stats_->TypeNeedsAccessCheck();
1396 // Be conservative.
1397 *finalizable = true;
1398 return false; // Unknown class needs access checks.
1399 }
1400 *finalizable = resolved_class->IsFinalizable();
1401 bool is_accessible = resolved_class->IsPublic(); // Public classes are always accessible.
1402 if (!is_accessible) {
1403 if (referrer_class == nullptr) {
1404 stats_->TypeNeedsAccessCheck();
1405 return false; // Incomplete referrer knowledge needs access check.
1406 }
1407 // Perform the access and instantiability checks; this yields true if access is OK, or false
1408 // if we will have to check it at runtime (for example, for class loaders).
1409 is_accessible = referrer_class->CanAccess(resolved_class);
1410 }
1411 bool result = is_accessible && resolved_class->IsInstantiable();
1412 if (result) {
1413 stats_->TypeDoesntNeedAccessCheck();
1414 } else {
1415 stats_->TypeNeedsAccessCheck();
1416 }
1417 return result;
1418 }
1419
1420 void CompilerDriver::ProcessedInstanceField(bool resolved) {
1421 if (!resolved) {
1422 stats_->UnresolvedInstanceField();
1423 } else {
1424 stats_->ResolvedInstanceField();
1425 }
1426 }
1427
1428 void CompilerDriver::ProcessedStaticField(bool resolved, bool local) {
1429 if (!resolved) {
1430 stats_->UnresolvedStaticField();
1431 } else if (local) {
1432 stats_->ResolvedLocalStaticField();
1433 } else {
1434 stats_->ResolvedStaticField();
1435 }
1436 }
1437
1438 ArtField* CompilerDriver::ComputeInstanceFieldInfo(uint32_t field_idx,
1439 const DexCompilationUnit* mUnit,
1440 bool is_put,
1441 const ScopedObjectAccess& soa) {
1442 // Try to resolve the field and compiling method's class.
1443 ArtField* resolved_field;
1444 ObjPtr<mirror::Class> referrer_class;
1445 Handle<mirror::DexCache> dex_cache(mUnit->GetDexCache());
1446 {
1447 Handle<mirror::ClassLoader> class_loader = mUnit->GetClassLoader();
1448 resolved_field = ResolveField(soa, dex_cache, class_loader, field_idx, /* is_static */ false);
1449 referrer_class = resolved_field != nullptr
1450 ? ResolveCompilingMethodsClass(soa, dex_cache, class_loader, mUnit) : nullptr;
1451 }
1452 bool can_link = false;
1453 if (resolved_field != nullptr && referrer_class != nullptr) {
1454 std::pair<bool, bool> fast_path = IsFastInstanceField(
1455 dex_cache.Get(), referrer_class, resolved_field, field_idx);
1456 can_link = is_put ? fast_path.second : fast_path.first;
1457 }
1458 ProcessedInstanceField(can_link);
1459 return can_link ? resolved_field : nullptr;
1460 }
1461
1462 bool CompilerDriver::ComputeInstanceFieldInfo(uint32_t field_idx, const DexCompilationUnit* mUnit,
1463 bool is_put, MemberOffset* field_offset,
1464 bool* is_volatile) {
1465 ScopedObjectAccess soa(Thread::Current());
1466 ArtField* resolved_field = ComputeInstanceFieldInfo(field_idx, mUnit, is_put, soa);
1467
1468 if (resolved_field == nullptr) {
1469 // Conservative defaults.
1470 *is_volatile = true;
1471 *field_offset = MemberOffset(static_cast<size_t>(-1));
1472 return false;
1473 } else {
1474 *is_volatile = resolved_field->IsVolatile();
1475 *field_offset = resolved_field->GetOffset();
1476 return true;
1477 }
1478 }
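
// Hedged usage sketch (illustration only, not from the original file; the variable names
// below are assumptions): how a backend might query this overload when lowering an IGET.
//
//   MemberOffset field_offset(0u);
//   bool is_volatile = true;
//   if (driver->ComputeInstanceFieldInfo(field_idx, &dex_compilation_unit,
//                                        /* is_put */ false, &field_offset, &is_volatile)) {
//     // Fast path: the offset is known at compile time; a volatile field still needs barriers.
//   } else {
//     // Slow path: fall back to a runtime field lookup.
//   }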
1479
1480 const VerifiedMethod* CompilerDriver::GetVerifiedMethod(const DexFile* dex_file,
1481 uint32_t method_idx) const {
1482 MethodReference ref(dex_file, method_idx);
1483 return verification_results_->GetVerifiedMethod(ref);
1484 }
1485
1486 bool CompilerDriver::IsSafeCast(const DexCompilationUnit* mUnit, uint32_t dex_pc) {
1487 if (!compiler_options_->IsVerificationEnabled()) {
1488 // If we didn't verify, every cast has to be treated as non-safe.
1489 return false;
1490 }
1491 DCHECK(mUnit->GetVerifiedMethod() != nullptr);
1492 bool result = mUnit->GetVerifiedMethod()->IsSafeCast(dex_pc);
1493 if (result) {
1494 stats_->SafeCast();
1495 } else {
1496 stats_->NotASafeCast();
1497 }
1498 return result;
1499 }
1500
1501 class CompilationVisitor {
1502 public:
1503 virtual ~CompilationVisitor() {}
1504 virtual void Visit(size_t index) = 0;
1505 };
1506
1507 class ParallelCompilationManager {
1508 public:
1509 ParallelCompilationManager(ClassLinker* class_linker,
1510 jobject class_loader,
1511 CompilerDriver* compiler,
1512 const DexFile* dex_file,
1513 const std::vector<const DexFile*>& dex_files,
1514 ThreadPool* thread_pool)
1515 : index_(0),
1516 class_linker_(class_linker),
1517 class_loader_(class_loader),
1518 compiler_(compiler),
1519 dex_file_(dex_file),
1520 dex_files_(dex_files),
1521 thread_pool_(thread_pool) {}
1522
1523 ClassLinker* GetClassLinker() const {
1524 CHECK(class_linker_ != nullptr);
1525 return class_linker_;
1526 }
1527
1528 jobject GetClassLoader() const {
1529 return class_loader_;
1530 }
1531
1532 CompilerDriver* GetCompiler() const {
1533 CHECK(compiler_ != nullptr);
1534 return compiler_;
1535 }
1536
1537 const DexFile* GetDexFile() const {
1538 CHECK(dex_file_ != nullptr);
1539 return dex_file_;
1540 }
1541
1542 const std::vector<const DexFile*>& GetDexFiles() const {
1543 return dex_files_;
1544 }
1545
1546 void ForAll(size_t begin, size_t end, CompilationVisitor* visitor, size_t work_units)
1547 REQUIRES(!*Locks::mutator_lock_) {
1548 ForAllLambda(begin, end, [visitor](size_t index) { visitor->Visit(index); }, work_units);
1549 }
1550
1551 template <typename Fn>
1552 void ForAllLambda(size_t begin, size_t end, Fn fn, size_t work_units)
1553 REQUIRES(!*Locks::mutator_lock_) {
1554 Thread* self = Thread::Current();
1555 self->AssertNoPendingException();
1556 CHECK_GT(work_units, 0U);
1557
1558 index_.StoreRelaxed(begin);
1559 for (size_t i = 0; i < work_units; ++i) {
1560 thread_pool_->AddTask(self, new ForAllClosureLambda<Fn>(this, end, fn));
1561 }
1562 thread_pool_->StartWorkers(self);
1563
1564 // Ensure we're suspended while we're blocked waiting for the other threads to finish (worker
1565 // thread destructors called below perform the join).
1566 CHECK_NE(self->GetState(), kRunnable);
1567
1568 // Wait for all the worker threads to finish.
1569 thread_pool_->Wait(self, true, false);
1570
1571 // And stop the workers accepting jobs.
1572 thread_pool_->StopWorkers(self);
1573 }
1574
1575 size_t NextIndex() {
1576 return index_.FetchAndAddSequentiallyConsistent(1);
1577 }
1578
1579 private:
1580 template <typename Fn>
1581 class ForAllClosureLambda : public Task {
1582 public:
1583 ForAllClosureLambda(ParallelCompilationManager* manager, size_t end, Fn fn)
1584 : manager_(manager),
1585 end_(end),
1586 fn_(fn) {}
1587
1588 void Run(Thread* self) OVERRIDE {
1589 while (true) {
1590 const size_t index = manager_->NextIndex();
1591 if (UNLIKELY(index >= end_)) {
1592 break;
1593 }
1594 fn_(index);
1595 self->AssertNoPendingException();
1596 }
1597 }
1598
1599 void Finalize() OVERRIDE {
1600 delete this;
1601 }
1602
1603 private:
1604 ParallelCompilationManager* const manager_;
1605 const size_t end_;
1606 Fn fn_;
1607 };
1608
1609 AtomicInteger index_;
1610 ClassLinker* const class_linker_;
1611 const jobject class_loader_;
1612 CompilerDriver* const compiler_;
1613 const DexFile* const dex_file_;
1614 const std::vector<const DexFile*>& dex_files_;
1615 ThreadPool* const thread_pool_;
1616
1617 DISALLOW_COPY_AND_ASSIGN(ParallelCompilationManager);
1618 };
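
// Hedged sketch (illustration only, not from the original file): the typical pattern for
// driving the manager. "ExampleVisitor" is a made-up name; the real visitors (resolution,
// verification, initialization, compilation) below follow the same shape.
//
//   class ExampleVisitor : public CompilationVisitor {
//    public:
//     explicit ExampleVisitor(const ParallelCompilationManager* manager) : manager_(manager) {}
//     void Visit(size_t class_def_index) OVERRIDE {
//       // Per-class work. Indices are handed out through the manager's atomic counter,
//       // so the processing order across worker threads is unspecified.
//     }
//    private:
//     const ParallelCompilationManager* const manager_;
//   };
//
//   ParallelCompilationManager context(class_linker, class_loader, driver,
//                                      &dex_file, dex_files, thread_pool);
//   ExampleVisitor visitor(&context);
//   context.ForAll(0, dex_file.NumClassDefs(), &visitor, thread_count);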
1619
1620 // A faster version of the SkipClass check above for when the class pointer is available;
1621 // it avoids the expensive FindInClassPath search.
1622 static bool SkipClass(jobject class_loader, const DexFile& dex_file, ObjPtr<mirror::Class> klass)
1623 REQUIRES_SHARED(Locks::mutator_lock_) {
1624 DCHECK(klass != nullptr);
1625 const DexFile& original_dex_file = *klass->GetDexCache()->GetDexFile();
1626 if (&dex_file != &original_dex_file) {
1627 if (class_loader == nullptr) {
1628 LOG(WARNING) << "Skipping class " << klass->PrettyDescriptor() << " from "
1629 << dex_file.GetLocation() << " previously found in "
1630 << original_dex_file.GetLocation();
1631 }
1632 return true;
1633 }
1634 return false;
1635 }
1636
1637 static void CheckAndClearResolveException(Thread* self)
1638 REQUIRES_SHARED(Locks::mutator_lock_) {
1639 CHECK(self->IsExceptionPending());
1640 mirror::Throwable* exception = self->GetException();
1641 std::string temp;
1642 const char* descriptor = exception->GetClass()->GetDescriptor(&temp);
1643 const char* expected_exceptions[] = {
1644 "Ljava/lang/IllegalAccessError;",
1645 "Ljava/lang/IncompatibleClassChangeError;",
1646 "Ljava/lang/InstantiationError;",
1647 "Ljava/lang/LinkageError;",
1648 "Ljava/lang/NoClassDefFoundError;",
1649 "Ljava/lang/NoSuchFieldError;",
1650 "Ljava/lang/NoSuchMethodError;"
1651 };
1652 bool found = false;
1653 for (size_t i = 0; (found == false) && (i < arraysize(expected_exceptions)); ++i) {
1654 if (strcmp(descriptor, expected_exceptions[i]) == 0) {
1655 found = true;
1656 }
1657 }
1658 if (!found) {
1659 LOG(FATAL) << "Unexpected exception " << exception->Dump();
1660 }
1661 self->ClearException();
1662 }
1663
1664 bool CompilerDriver::RequiresConstructorBarrier(const DexFile& dex_file,
1665 uint16_t class_def_idx) const {
1666 const DexFile::ClassDef& class_def = dex_file.GetClassDef(class_def_idx);
1667 const uint8_t* class_data = dex_file.GetClassData(class_def);
1668 if (class_data == nullptr) {
1669 // Empty class such as a marker interface.
1670 return false;
1671 }
1672 ClassDataItemIterator it(dex_file, class_data);
1673 it.SkipStaticFields();
1674 // We require a constructor barrier if there are final instance fields.
1675 while (it.HasNextInstanceField()) {
1676 if (it.MemberIsFinal()) {
1677 return true;
1678 }
1679 it.Next();
1680 }
1681 return false;
1682 }
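
// Hedged usage sketch (not part of the original source): a backend could query this when
// emitting a constructor, e.g.
//
//   bool needs_barrier = driver->RequiresConstructorBarrier(dex_file, class_def_idx);
//   // true iff the class declares at least one final instance field; constructors then need
//   // a store-store barrier before the newly constructed object is published.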
1683
1684 class ResolveClassFieldsAndMethodsVisitor : public CompilationVisitor {
1685 public:
1686 explicit ResolveClassFieldsAndMethodsVisitor(const ParallelCompilationManager* manager)
1687 : manager_(manager) {}
1688
1689 void Visit(size_t class_def_index) OVERRIDE REQUIRES(!Locks::mutator_lock_) {
1690 ScopedTrace trace(__FUNCTION__);
1691 Thread* const self = Thread::Current();
1692 jobject jclass_loader = manager_->GetClassLoader();
1693 const DexFile& dex_file = *manager_->GetDexFile();
1694 ClassLinker* class_linker = manager_->GetClassLinker();
1695
1696 // If an instance field is final then we need a barrier on the constructor return; static final
1697 // fields are assigned within the lock held for class initialization. Conservatively assume
1698 // constructor barriers are always required.
1699 bool requires_constructor_barrier = true;
1700
1701 // Method and Field are the worst. We can't resolve without either
1702 // context from the code use (to disambiguate virtual vs direct
1703 // method and instance vs static field) or from class
1704 // definitions. While the compiler will resolve what it can as it
1705 // needs it, here we try to resolve fields and methods used in class
1706 // definitions, since many of them may never be referenced by
1707 // generated code.
1708 const DexFile::ClassDef& class_def = dex_file.GetClassDef(class_def_index);
1709 ScopedObjectAccess soa(self);
1710 StackHandleScope<2> hs(soa.Self());
1711 Handle<mirror::ClassLoader> class_loader(
1712 hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader)));
1713 Handle<mirror::DexCache> dex_cache(hs.NewHandle(class_linker->FindDexCache(
1714 soa.Self(), dex_file)));
1715 // Resolve the class.
1716 ObjPtr<mirror::Class> klass =
1717 class_linker->ResolveType(class_def.class_idx_, dex_cache, class_loader);
1718 bool resolve_fields_and_methods;
1719 if (klass == nullptr) {
1720 // Class couldn't be resolved, for example, super-class is in a different dex file. Don't
1721 // attempt to resolve methods and fields when there is no declaring class.
1722 CheckAndClearResolveException(soa.Self());
1723 resolve_fields_and_methods = false;
1724 } else {
1725 // We successfully resolved a class, should we skip it?
1726 if (SkipClass(jclass_loader, dex_file, klass)) {
1727 return;
1728 }
1729 // We want to resolve the methods and fields eagerly.
1730 resolve_fields_and_methods = true;
1731 }
1732 // Note the class_data pointer advances through the headers,
1733 // static fields, instance fields, direct methods, and virtual
1734 // methods.
1735 const uint8_t* class_data = dex_file.GetClassData(class_def);
1736 if (class_data == nullptr) {
1737 // Empty class such as a marker interface.
1738 requires_constructor_barrier = false;
1739 } else {
1740 ClassDataItemIterator it(dex_file, class_data);
1741 while (it.HasNextStaticField()) {
1742 if (resolve_fields_and_methods) {
1743 ArtField* field = class_linker->ResolveField(
1744 it.GetMemberIndex(), dex_cache, class_loader, /* is_static */ true);
1745 if (field == nullptr) {
1746 CheckAndClearResolveException(soa.Self());
1747 }
1748 }
1749 it.Next();
1750 }
1751 // We require a constructor barrier if there are final instance fields.
1752 requires_constructor_barrier = false;
1753 while (it.HasNextInstanceField()) {
1754 if (it.MemberIsFinal()) {
1755 requires_constructor_barrier = true;
1756 }
1757 if (resolve_fields_and_methods) {
1758 ArtField* field = class_linker->ResolveField(
1759 it.GetMemberIndex(), dex_cache, class_loader, /* is_static */ false);
1760 if (field == nullptr) {
1761 CheckAndClearResolveException(soa.Self());
1762 }
1763 }
1764 it.Next();
1765 }
1766 if (resolve_fields_and_methods) {
1767 while (it.HasNextMethod()) {
1768 ArtMethod* method = class_linker->ResolveMethod<ClassLinker::ResolveMode::kNoChecks>(
1769 it.GetMemberIndex(),
1770 dex_cache,
1771 class_loader,
1772 /* referrer */ nullptr,
1773 it.GetMethodInvokeType(class_def));
1774 if (method == nullptr) {
1775 CheckAndClearResolveException(soa.Self());
1776 }
1777 it.Next();
1778 }
1779 DCHECK(!it.HasNext());
1780 }
1781 }
1782 manager_->GetCompiler()->SetRequiresConstructorBarrier(self,
1783 &dex_file,
1784 class_def_index,
1785 requires_constructor_barrier);
1786 }
1787
1788 private:
1789 const ParallelCompilationManager* const manager_;
1790 };
1791
1792 class ResolveTypeVisitor : public CompilationVisitor {
1793 public:
1794 explicit ResolveTypeVisitor(const ParallelCompilationManager* manager) : manager_(manager) {
1795 }
1796 void Visit(size_t type_idx) OVERRIDE REQUIRES(!Locks::mutator_lock_) {
1797 // Class derived values are more complicated, they require the linker and loader.
1798 ScopedObjectAccess soa(Thread::Current());
1799 ClassLinker* class_linker = manager_->GetClassLinker();
1800 const DexFile& dex_file = *manager_->GetDexFile();
1801 StackHandleScope<2> hs(soa.Self());
1802 Handle<mirror::ClassLoader> class_loader(
1803 hs.NewHandle(soa.Decode<mirror::ClassLoader>(manager_->GetClassLoader())));
1804 Handle<mirror::DexCache> dex_cache(hs.NewHandle(class_linker->RegisterDexFile(
1805 dex_file,
1806 class_loader.Get())));
1807 ObjPtr<mirror::Class> klass = (dex_cache != nullptr)
1808 ? class_linker->ResolveType(dex::TypeIndex(type_idx), dex_cache, class_loader)
1809 : nullptr;
1810
1811 if (klass == nullptr) {
1812 soa.Self()->AssertPendingException();
1813 mirror::Throwable* exception = soa.Self()->GetException();
1814 VLOG(compiler) << "Exception during type resolution: " << exception->Dump();
1815 if (exception->GetClass()->DescriptorEquals("Ljava/lang/OutOfMemoryError;")) {
1816 // There's little point continuing compilation if the heap is exhausted.
1817 LOG(FATAL) << "Out of memory during type resolution for compilation";
1818 }
1819 soa.Self()->ClearException();
1820 }
1821 }
1822
1823 private:
1824 const ParallelCompilationManager* const manager_;
1825 };
1826
1827 void CompilerDriver::ResolveDexFile(jobject class_loader,
1828 const DexFile& dex_file,
1829 const std::vector<const DexFile*>& dex_files,
1830 ThreadPool* thread_pool,
1831 size_t thread_count,
1832 TimingLogger* timings) {
1833 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
1834
1835 // TODO: we could resolve strings here, although the string table is largely filled with class
1836 // and method names.
1837
1838 ParallelCompilationManager context(class_linker, class_loader, this, &dex_file, dex_files,
1839 thread_pool);
1840 if (GetCompilerOptions().IsBootImage()) {
1841 // For images we resolve all types, including arrays, whereas for applications only those with
1842 // classdefs are resolved by ResolveClassFieldsAndMethods.
1843 TimingLogger::ScopedTiming t("Resolve Types", timings);
1844 ResolveTypeVisitor visitor(&context);
1845 context.ForAll(0, dex_file.NumTypeIds(), &visitor, thread_count);
1846 }
1847
1848 TimingLogger::ScopedTiming t("Resolve MethodsAndFields", timings);
1849 ResolveClassFieldsAndMethodsVisitor visitor(&context);
1850 context.ForAll(0, dex_file.NumClassDefs(), &visitor, thread_count);
1851 }
1852
1853 void CompilerDriver::SetVerified(jobject class_loader,
1854 const std::vector<const DexFile*>& dex_files,
1855 TimingLogger* timings) {
1856 // This can be run in parallel.
1857 for (const DexFile* dex_file : dex_files) {
1858 CHECK(dex_file != nullptr);
1859 SetVerifiedDexFile(class_loader,
1860 *dex_file,
1861 dex_files,
1862 parallel_thread_pool_.get(),
1863 parallel_thread_count_,
1864 timings);
1865 }
1866 }
1867
1868 static void PopulateVerifiedMethods(const DexFile& dex_file,
1869 uint32_t class_def_index,
1870 VerificationResults* verification_results) {
1871 const DexFile::ClassDef& class_def = dex_file.GetClassDef(class_def_index);
1872 const uint8_t* class_data = dex_file.GetClassData(class_def);
1873 if (class_data == nullptr) {
1874 return;
1875 }
1876 ClassDataItemIterator it(dex_file, class_data);
1877 it.SkipAllFields();
1878
1879 while (it.HasNextMethod()) {
1880 verification_results->CreateVerifiedMethodFor(MethodReference(&dex_file, it.GetMemberIndex()));
1881 it.Next();
1882 }
1883 DCHECK(!it.HasNext());
1884 }
1885
1886 static void LoadAndUpdateStatus(const DexFile& dex_file,
1887 const DexFile::ClassDef& class_def,
1888 ClassStatus status,
1889 Handle<mirror::ClassLoader> class_loader,
1890 Thread* self)
1891 REQUIRES_SHARED(Locks::mutator_lock_) {
1892 StackHandleScope<1> hs(self);
1893 const char* descriptor = dex_file.GetClassDescriptor(class_def);
1894 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
1895 Handle<mirror::Class> cls(hs.NewHandle<mirror::Class>(
1896 class_linker->FindClass(self, descriptor, class_loader)));
1897 if (cls != nullptr) {
1898 // Check that the class is resolved with the current dex file. We might get
1899 // a boot image class, or a class in a different dex file for multidex, and
1900 // we should not update the status in that case.
1901 if (&cls->GetDexFile() == &dex_file) {
1902 ObjectLock<mirror::Class> lock(self, cls);
1903 mirror::Class::SetStatus(cls, status, self);
1904 }
1905 } else {
1906 DCHECK(self->IsExceptionPending());
1907 self->ClearException();
1908 }
1909 }
1910
1911 bool CompilerDriver::FastVerify(jobject jclass_loader,
1912 const std::vector<const DexFile*>& dex_files,
1913 TimingLogger* timings) {
1914 verifier::VerifierDeps* verifier_deps =
1915 Runtime::Current()->GetCompilerCallbacks()->GetVerifierDeps();
1916 // If VerifierDeps exist that aren't the ones we just created for output, use them to verify.
1917 if (verifier_deps == nullptr || verifier_deps->OutputOnly()) {
1918 return false;
1919 }
1920 TimingLogger::ScopedTiming t("Fast Verify", timings);
1921 ScopedObjectAccess soa(Thread::Current());
1922 StackHandleScope<2> hs(soa.Self());
1923 Handle<mirror::ClassLoader> class_loader(
1924 hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader)));
1925 if (!verifier_deps->ValidateDependencies(class_loader, soa.Self())) {
1926 return false;
1927 }
1928
1929 bool compiler_only_verifies = !GetCompilerOptions().IsAnyCompilationEnabled();
1930
1931 // We successfully validated the dependencies, now update class status
1932 // of verified classes. Note that the dependencies also record which classes
1933 // could not be fully verified; we could try again, but that would hurt verification
1934 // time. So instead we assume these classes still need to be verified at
1935 // runtime.
1936 for (const DexFile* dex_file : dex_files) {
1937 // Fetch the list of unverified classes.
1938 const std::set<dex::TypeIndex>& unverified_classes =
1939 verifier_deps->GetUnverifiedClasses(*dex_file);
1940 for (uint32_t i = 0; i < dex_file->NumClassDefs(); ++i) {
1941 const DexFile::ClassDef& class_def = dex_file->GetClassDef(i);
1942 if (unverified_classes.find(class_def.class_idx_) == unverified_classes.end()) {
1943 if (compiler_only_verifies) {
1944 // Just update the compiled_classes_ map. The compiler doesn't need to resolve
1945 // the type.
1946 ClassReference ref(dex_file, i);
1947 ClassStatus existing = ClassStatus::kNotReady;
1948 DCHECK(compiled_classes_.Get(ref, &existing)) << ref.dex_file->GetLocation();
1949 ClassStateTable::InsertResult result =
1950 compiled_classes_.Insert(ref, existing, ClassStatus::kVerified);
1951 CHECK_EQ(result, ClassStateTable::kInsertResultSuccess);
1952 } else {
1953 // Update the class status, so later compilation stages know they don't need to verify
1954 // the class.
1955 LoadAndUpdateStatus(
1956 *dex_file, class_def, ClassStatus::kVerified, class_loader, soa.Self());
1957 // Create a `VerifiedMethod` for each method; the compiler expects one for
1958 // quickening or compiling.
1959 // Note that this means:
1960 // - We're only going to compile methods that did verify.
1961 // - Quickening will not do checkcast elision.
1962 // TODO(ngeoffray): Reconsider this once we refactor compiler filters.
1963 PopulateVerifiedMethods(*dex_file, i, verification_results_);
1964 }
1965 } else if (!compiler_only_verifies) {
1966 // Make sure later compilation stages know they should not try to verify
1967 // this class again.
1968 LoadAndUpdateStatus(*dex_file,
1969 class_def,
1970 ClassStatus::kRetryVerificationAtRuntime,
1971 class_loader,
1972 soa.Self());
1973 }
1974 }
1975 }
1976 return true;
1977 }
1978
1979 void CompilerDriver::Verify(jobject jclass_loader,
1980 const std::vector<const DexFile*>& dex_files,
1981 TimingLogger* timings) {
1982 if (FastVerify(jclass_loader, dex_files, timings)) {
1983 return;
1984 }
1985
1986 // If there is no existing `verifier_deps` (because there is no vdex file), or
1987 // the existing `verifier_deps` is no longer valid, create a new one for
1988 // non-boot-image compilation. The verifier will need it to record the new dependencies.
1989 // Then dex2oat can update the vdex file with these new dependencies.
1990 if (!GetCompilerOptions().IsBootImage()) {
1991 // Dex2oat creates the verifier deps.
1992 // Create the main VerifierDeps, and set it to this thread.
1993 verifier::VerifierDeps* verifier_deps =
1994 Runtime::Current()->GetCompilerCallbacks()->GetVerifierDeps();
1995 CHECK(verifier_deps != nullptr);
1996 Thread::Current()->SetVerifierDeps(verifier_deps);
1997 // Create per-thread VerifierDeps to avoid contention on the main one.
1998 // We will merge them after verification.
1999 for (ThreadPoolWorker* worker : parallel_thread_pool_->GetWorkers()) {
2000 worker->GetThread()->SetVerifierDeps(new verifier::VerifierDeps(dex_files_for_oat_file_));
2001 }
2002 }
2003
2004 // Verification updates VerifierDeps and needs to run single-threaded to be deterministic.
2005 bool force_determinism = GetCompilerOptions().IsForceDeterminism();
2006 ThreadPool* verify_thread_pool =
2007 force_determinism ? single_thread_pool_.get() : parallel_thread_pool_.get();
2008 size_t verify_thread_count = force_determinism ? 1U : parallel_thread_count_;
2009 for (const DexFile* dex_file : dex_files) {
2010 CHECK(dex_file != nullptr);
2011 VerifyDexFile(jclass_loader,
2012 *dex_file,
2013 dex_files,
2014 verify_thread_pool,
2015 verify_thread_count,
2016 timings);
2017 }
2018
2019 if (!GetCompilerOptions().IsBootImage()) {
2020 // Merge all VerifierDeps into the main one.
2021 verifier::VerifierDeps* verifier_deps = Thread::Current()->GetVerifierDeps();
2022 for (ThreadPoolWorker* worker : parallel_thread_pool_->GetWorkers()) {
2023 verifier::VerifierDeps* thread_deps = worker->GetThread()->GetVerifierDeps();
2024 worker->GetThread()->SetVerifierDeps(nullptr);
2025 verifier_deps->MergeWith(*thread_deps, dex_files_for_oat_file_);
2026 delete thread_deps;
2027 }
2028 Thread::Current()->SetVerifierDeps(nullptr);
2029 }
2030 }
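
// Hedged sketch (illustration only) of the per-thread VerifierDeps pattern used in Verify
// above: each worker records dependencies into its own object, so verification needs no
// locking, and the results are merged into the main deps on the main thread afterwards.
//
//   // Before verification, one deps object per worker:
//   worker->GetThread()->SetVerifierDeps(new verifier::VerifierDeps(dex_files));
//   // After verification, merge and discard the per-thread object:
//   main_deps->MergeWith(*thread_deps, dex_files);
//   delete thread_deps;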
2031
2032 class VerifyClassVisitor : public CompilationVisitor {
2033 public:
2034 VerifyClassVisitor(const ParallelCompilationManager* manager, verifier::HardFailLogMode log_level)
2035 : manager_(manager), log_level_(log_level) {}
2036
2037 virtual void Visit(size_t class_def_index) REQUIRES(!Locks::mutator_lock_) OVERRIDE {
2038 ScopedTrace trace(__FUNCTION__);
2039 ScopedObjectAccess soa(Thread::Current());
2040 const DexFile& dex_file = *manager_->GetDexFile();
2041 const DexFile::ClassDef& class_def = dex_file.GetClassDef(class_def_index);
2042 const char* descriptor = dex_file.GetClassDescriptor(class_def);
2043 ClassLinker* class_linker = manager_->GetClassLinker();
2044 jobject jclass_loader = manager_->GetClassLoader();
2045 StackHandleScope<3> hs(soa.Self());
2046 Handle<mirror::ClassLoader> class_loader(
2047 hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader)));
2048 Handle<mirror::Class> klass(
2049 hs.NewHandle(class_linker->FindClass(soa.Self(), descriptor, class_loader)));
2050 verifier::FailureKind failure_kind;
2051 if (klass == nullptr) {
2052 CHECK(soa.Self()->IsExceptionPending());
2053 soa.Self()->ClearException();
2054
2055 /*
2056 * At compile time, we can still structurally verify the class even if FindClass fails.
2057 * This is to ensure the class is structurally sound for compilation. An unsound class
2058 * will be rejected by the verifier and later skipped by the compiler.
2059 */
2060 Handle<mirror::DexCache> dex_cache(hs.NewHandle(class_linker->FindDexCache(
2061 soa.Self(), dex_file)));
2062 std::string error_msg;
2063 failure_kind =
2064 verifier::MethodVerifier::VerifyClass(soa.Self(),
2065 &dex_file,
2066 dex_cache,
2067 class_loader,
2068 class_def,
2069 Runtime::Current()->GetCompilerCallbacks(),
2070 true /* allow soft failures */,
2071 log_level_,
2072 &error_msg);
2073 if (failure_kind == verifier::FailureKind::kHardFailure) {
2074 LOG(ERROR) << "Verification failed on class " << PrettyDescriptor(descriptor)
2075 << " because: " << error_msg;
2076 manager_->GetCompiler()->SetHadHardVerifierFailure();
2077 } else if (failure_kind == verifier::FailureKind::kSoftFailure) {
2078 manager_->GetCompiler()->AddSoftVerifierFailure();
2079 } else {
2080 // Force a soft failure for the VerifierDeps. This is a sanity measure, as
2081 // the vdex file already records that the class hasn't been resolved. It avoids
2082 // trying to do future verification optimizations when processing the vdex file.
2083 DCHECK(failure_kind == verifier::FailureKind::kNoFailure) << failure_kind;
2084 failure_kind = verifier::FailureKind::kSoftFailure;
2085 }
2086 } else if (!SkipClass(jclass_loader, dex_file, klass.Get())) {
2087 CHECK(klass->IsResolved()) << klass->PrettyClass();
2088 failure_kind = class_linker->VerifyClass(soa.Self(), klass, log_level_);
2089
2090 if (klass->IsErroneous()) {
2091 // ClassLinker::VerifyClass throws, which isn't useful in the compiler.
2092 CHECK(soa.Self()->IsExceptionPending());
2093 soa.Self()->ClearException();
2094 manager_->GetCompiler()->SetHadHardVerifierFailure();
2095 } else if (failure_kind == verifier::FailureKind::kSoftFailure) {
2096 manager_->GetCompiler()->AddSoftVerifierFailure();
2097 }
2098
2099 CHECK(klass->ShouldVerifyAtRuntime() || klass->IsVerified() || klass->IsErroneous())
2100 << klass->PrettyDescriptor() << ": state=" << klass->GetStatus();
2101
2102 // Class has a meaningful status for the compiler now, record it.
2103 ClassReference ref(manager_->GetDexFile(), class_def_index);
2104 manager_->GetCompiler()->RecordClassStatus(ref, klass->GetStatus());
2105
2106 // It is *very* problematic if there are resolution errors in the boot classpath.
2107 //
2108 // It is also bad if classes fail verification. For example, we rely on things working
2109 // OK without verification when the decryption dialog is brought up. It is thus highly
2110 // recommended to compile the boot classpath with
2111 // --abort-on-hard-verifier-error --abort-on-soft-verifier-error
2112 // which is the default build system configuration.
2113 if (kIsDebugBuild) {
2114 if (manager_->GetCompiler()->GetCompilerOptions().IsBootImage()) {
2115 if (!klass->IsResolved() || klass->IsErroneous()) {
2116 LOG(FATAL) << "Boot classpath class " << klass->PrettyClass()
2117 << " failed to resolve/is erroneous: state= " << klass->GetStatus();
2118 UNREACHABLE();
2119 }
2120 }
2121 if (klass->IsVerified()) {
2122 DCHECK_EQ(failure_kind, verifier::FailureKind::kNoFailure);
2123 } else if (klass->ShouldVerifyAtRuntime()) {
2124 DCHECK_EQ(failure_kind, verifier::FailureKind::kSoftFailure);
2125 } else {
2126 DCHECK_EQ(failure_kind, verifier::FailureKind::kHardFailure);
2127 }
2128 }
2129 } else {
2130 // Make the skip a soft failure, essentially treating the class as verify-at-runtime.
2131 failure_kind = verifier::FailureKind::kSoftFailure;
2132 }
2133 verifier::VerifierDeps::MaybeRecordVerificationStatus(
2134 dex_file, class_def.class_idx_, failure_kind);
2135 soa.Self()->AssertNoPendingException();
2136 }
2137
2138 private:
2139 const ParallelCompilationManager* const manager_;
2140 const verifier::HardFailLogMode log_level_;
2141 };
2142
2143 void CompilerDriver::VerifyDexFile(jobject class_loader,
2144 const DexFile& dex_file,
2145 const std::vector<const DexFile*>& dex_files,
2146 ThreadPool* thread_pool,
2147 size_t thread_count,
2148 TimingLogger* timings) {
2149 TimingLogger::ScopedTiming t("Verify Dex File", timings);
2150 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
2151 ParallelCompilationManager context(class_linker, class_loader, this, &dex_file, dex_files,
2152 thread_pool);
2153 bool abort_on_verifier_failures = GetCompilerOptions().AbortOnHardVerifierFailure()
2154 || GetCompilerOptions().AbortOnSoftVerifierFailure();
2155 verifier::HardFailLogMode log_level = abort_on_verifier_failures
2156 ? verifier::HardFailLogMode::kLogInternalFatal
2157 : verifier::HardFailLogMode::kLogWarning;
2158 VerifyClassVisitor visitor(&context, log_level);
2159 context.ForAll(0, dex_file.NumClassDefs(), &visitor, thread_count);
2160 }
2161
2162 class SetVerifiedClassVisitor : public CompilationVisitor {
2163 public:
2164 explicit SetVerifiedClassVisitor(const ParallelCompilationManager* manager) : manager_(manager) {}
2165
2166 virtual void Visit(size_t class_def_index) REQUIRES(!Locks::mutator_lock_) OVERRIDE {
2167 ScopedTrace trace(__FUNCTION__);
2168 ScopedObjectAccess soa(Thread::Current());
2169 const DexFile& dex_file = *manager_->GetDexFile();
2170 const DexFile::ClassDef& class_def = dex_file.GetClassDef(class_def_index);
2171 const char* descriptor = dex_file.GetClassDescriptor(class_def);
2172 ClassLinker* class_linker = manager_->GetClassLinker();
2173 jobject jclass_loader = manager_->GetClassLoader();
2174 StackHandleScope<3> hs(soa.Self());
2175 Handle<mirror::ClassLoader> class_loader(
2176 hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader)));
2177 Handle<mirror::Class> klass(
2178 hs.NewHandle(class_linker->FindClass(soa.Self(), descriptor, class_loader)));
2179 // Class might have failed resolution. Then don't set it to verified.
2180 if (klass != nullptr) {
2181 // Only do this if the class is resolved. If even resolution fails, quickening will go very,
2182 // very wrong.
2183 if (klass->IsResolved() && !klass->IsErroneousResolved()) {
2184 if (klass->GetStatus() < ClassStatus::kVerified) {
2185 ObjectLock<mirror::Class> lock(soa.Self(), klass);
2186 // Set class status to verified.
2187 mirror::Class::SetStatus(klass, ClassStatus::kVerified, soa.Self());
2188 // Mark methods as pre-verified. If we don't do this, the interpreter will run with
2189 // access checks.
2190 klass->SetSkipAccessChecksFlagOnAllMethods(
2191 GetInstructionSetPointerSize(manager_->GetCompiler()->GetInstructionSet()));
2192 klass->SetVerificationAttempted();
2193 }
2194 // Record the final class status if necessary.
2195 ClassReference ref(manager_->GetDexFile(), class_def_index);
2196 manager_->GetCompiler()->RecordClassStatus(ref, klass->GetStatus());
2197 }
2198 } else {
2199 Thread* self = soa.Self();
2200 DCHECK(self->IsExceptionPending());
2201 self->ClearException();
2202 }
2203 }
2204
2205 private:
2206 const ParallelCompilationManager* const manager_;
2207 };
2208
2209 void CompilerDriver::SetVerifiedDexFile(jobject class_loader,
2210 const DexFile& dex_file,
2211 const std::vector<const DexFile*>& dex_files,
2212 ThreadPool* thread_pool,
2213 size_t thread_count,
2214 TimingLogger* timings) {
2215 TimingLogger::ScopedTiming t("Verify Dex File", timings);
2216 if (!compiled_classes_.HaveDexFile(&dex_file)) {
2217 compiled_classes_.AddDexFile(&dex_file);
2218 }
2219 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
2220 ParallelCompilationManager context(class_linker, class_loader, this, &dex_file, dex_files,
2221 thread_pool);
2222 SetVerifiedClassVisitor visitor(&context);
2223 context.ForAll(0, dex_file.NumClassDefs(), &visitor, thread_count);
2224 }
2225
2226 class InitializeClassVisitor : public CompilationVisitor {
2227 public:
2228 explicit InitializeClassVisitor(const ParallelCompilationManager* manager) : manager_(manager) {}
2229
2230 void Visit(size_t class_def_index) OVERRIDE {
2231 ScopedTrace trace(__FUNCTION__);
2232 jobject jclass_loader = manager_->GetClassLoader();
2233 const DexFile& dex_file = *manager_->GetDexFile();
2234 const DexFile::ClassDef& class_def = dex_file.GetClassDef(class_def_index);
2235 const DexFile::TypeId& class_type_id = dex_file.GetTypeId(class_def.class_idx_);
2236 const char* descriptor = dex_file.StringDataByIdx(class_type_id.descriptor_idx_);
2237
2238 ScopedObjectAccess soa(Thread::Current());
2239 StackHandleScope<3> hs(soa.Self());
2240 Handle<mirror::ClassLoader> class_loader(
2241 hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader)));
2242 Handle<mirror::Class> klass(
2243 hs.NewHandle(manager_->GetClassLinker()->FindClass(soa.Self(), descriptor, class_loader)));
2244
2245 if (klass != nullptr && !SkipClass(manager_->GetClassLoader(), dex_file, klass.Get())) {
2246 TryInitializeClass(klass, class_loader);
2247 }
2248 // Clear any class not found or verification exceptions.
2249 soa.Self()->ClearException();
2250 }
2251
2252 // A helper function for initializing klass.
2253 void TryInitializeClass(Handle<mirror::Class> klass, Handle<mirror::ClassLoader>& class_loader)
2254 REQUIRES_SHARED(Locks::mutator_lock_) {
2255 const DexFile& dex_file = klass->GetDexFile();
2256 const DexFile::ClassDef* class_def = klass->GetClassDef();
2257 const DexFile::TypeId& class_type_id = dex_file.GetTypeId(class_def->class_idx_);
2258 const char* descriptor = dex_file.StringDataByIdx(class_type_id.descriptor_idx_);
2259 ScopedObjectAccessUnchecked soa(Thread::Current());
2260 StackHandleScope<3> hs(soa.Self());
2261 const bool is_boot_image = manager_->GetCompiler()->GetCompilerOptions().IsBootImage();
2262 const bool is_app_image = manager_->GetCompiler()->GetCompilerOptions().IsAppImage();
2263
2264 ClassStatus old_status = klass->GetStatus();
2265 // Don't initialize classes in the boot space when compiling an app image.
2266 if (is_app_image && klass->IsBootStrapClassLoaded()) {
2267 // Also return early and do not record the class status.
2268 return;
2269 }
2270 // Only try to initialize classes that were successfully verified.
2271 if (klass->IsVerified()) {
2272 // Attempt to initialize the class but bail if we either need to initialize the super-class
2273 // or static fields.
2274 manager_->GetClassLinker()->EnsureInitialized(soa.Self(), klass, false, false);
2275 old_status = klass->GetStatus();
2276 if (!klass->IsInitialized()) {
2277 // We don't want non-trivial class initialization occurring on multiple threads due to
2278 // deadlock problems. For example, a parent class is initialized (holding its lock) that
2279 // refers to a sub-class in its static/class initializer causing it to try to acquire the
2280 // sub-class' lock. While on a second thread the sub-class is initialized (holding its lock)
2281 // after first initializing its parents, whose locks are acquired. This leads to a
2282 // parent-to-child and a child-to-parent lock ordering and consequent potential deadlock.
2283 // We need to use an ObjectLock due to potential suspension in the interpreting code. Rather
2284 // than use a special Object for the purpose we use the Class of java.lang.Class.
2285 Handle<mirror::Class> h_klass(hs.NewHandle(klass->GetClass()));
2286 ObjectLock<mirror::Class> lock(soa.Self(), h_klass);
2287 // Attempt to initialize allowing initialization of parent classes but still not static
2288 // fields.
2289 // For app images only, initialize the dependencies first, making TryInitializeClass effectively recursive.
2290 bool is_superclass_initialized = !is_app_image ? true :
2291 InitializeDependencies(klass, class_loader, soa.Self());
2292 if (!is_app_image || (is_app_image && is_superclass_initialized)) {
2293 manager_->GetClassLinker()->EnsureInitialized(soa.Self(), klass, false, true);
2294 }
2295 // Otherwise it's an app image whose superclasses couldn't be initialized; no need to proceed.
2296 old_status = klass->GetStatus();
2297
2298 bool too_many_encoded_fields = false;
2299 if (!is_boot_image && klass->NumStaticFields() > kMaxEncodedFields) {
2300 too_many_encoded_fields = true;
2301 }
2302 // If the class was not initialized, we can proceed to see if we can initialize static
2303 // fields. Limit the max number of encoded fields.
2304 if (!klass->IsInitialized() &&
2305 (is_app_image || is_boot_image) &&
2306 is_superclass_initialized &&
2307 !too_many_encoded_fields &&
2308 manager_->GetCompiler()->IsImageClass(descriptor)) {
2309 bool can_init_static_fields = false;
2310 if (is_boot_image) {
2311 // We need to initialize static fields; we only do this for image classes that aren't
2312 // marked with the $NoPreloadHolder suffix (which implies the class should not be
2313 // initialized early).
2314 can_init_static_fields = !StringPiece(descriptor).ends_with("$NoPreloadHolder;");
2315 } else {
2316 CHECK(is_app_image);
2317 // The boot image case doesn't need to recursively initialize the dependencies with
2318 // special logic since the class linker already does this.
2319 can_init_static_fields =
2320 ClassLinker::kAppImageMayContainStrings &&
2321 !soa.Self()->IsExceptionPending() &&
2322 is_superclass_initialized &&
2323 NoClinitInDependency(klass, soa.Self(), &class_loader);
2324 // TODO: The clinit check could be removed since it is already performed
2325 // when initializing the superclass. Keep it for now because it also covers
2326 // the intern-string processing; remove it later once intern strings and
2327 // clinit are both handled there.
2328 }
2329
2330 if (can_init_static_fields) {
2331 VLOG(compiler) << "Initializing: " << descriptor;
2332 // TODO multithreading support. We should ensure the current compilation thread has
2333 // exclusive access to the runtime and the transaction. To achieve this, we could use
2334 // a ReaderWriterMutex but we're holding the mutator lock so we fail mutex sanity
2335 // checks in Thread::AssertThreadSuspensionIsAllowable.
2336 Runtime* const runtime = Runtime::Current();
2337 // Run the class initializer in transaction mode.
2338 runtime->EnterTransactionMode(is_app_image, klass.Get());
2339 bool success = manager_->GetClassLinker()->EnsureInitialized(soa.Self(), klass, true,
2340 true);
2341 // TODO: We detach the transaction from the runtime to indicate we have quit transactional
2342 // mode, which prevents the GC from visiting objects modified during the transaction.
2343 // Ensure the GC is not run, so we don't access freed objects when aborting the transaction.
2344
2345 {
2346 ScopedAssertNoThreadSuspension ants("Transaction end");
2347
2348 if (success) {
2349 runtime->ExitTransactionMode();
2350 DCHECK(!runtime->IsActiveTransaction());
2351 }
2352
2353 if (!success) {
2354 CHECK(soa.Self()->IsExceptionPending());
2355 mirror::Throwable* exception = soa.Self()->GetException();
2356 VLOG(compiler) << "Initialization of " << descriptor << " aborted because of "
2357 << exception->Dump();
2358 std::ostream* file_log = manager_->GetCompiler()->
2359 GetCompilerOptions().GetInitFailureOutput();
2360 if (file_log != nullptr) {
2361 *file_log << descriptor << "\n";
2362 *file_log << exception->Dump() << "\n";
2363 }
2364 soa.Self()->ClearException();
2365 runtime->RollbackAllTransactions();
2366 CHECK_EQ(old_status, klass->GetStatus()) << "Previous class status not restored";
2367 } else if (is_boot_image) {
2368 // For boot image, we want to put the updated status in the oat class since we can't
2369 // reject the image anyway.
2370 old_status = klass->GetStatus();
2371 }
2372 }
2373
2374 if (!success) {
2375 // On failure, still intern the strings of static fields and those seen in <clinit>, as
2376 // these will be created in the zygote. This is separated from the transaction code just
2377 // above because we will allocate strings, so we must be allowed to suspend.
2378 if (&klass->GetDexFile() == manager_->GetDexFile()) {
2379 InternStrings(klass, class_loader);
2380 } else {
2381 DCHECK(!is_boot_image) << "Boot image must have equal dex files";
2382 }
2383 }
2384 }
2385 }
2386 // If the class still isn't initialized, at least try some checks that initialization
2387 // would do so they can be skipped at runtime.
2388 if (!klass->IsInitialized() &&
2389 manager_->GetClassLinker()->ValidateSuperClassDescriptors(klass)) {
2390 old_status = ClassStatus::kSuperclassValidated;
2391 } else {
2392 soa.Self()->ClearException();
2393 }
2394 soa.Self()->AssertNoPendingException();
2395 }
2396 }
2397 // Record the final class status if necessary.
2398 ClassReference ref(&dex_file, klass->GetDexClassDefIndex());
2399 // Back up the status before doing initialization for static encoded fields,
2400 // because the static encoded branch wants to keep the status as uninitialized.
2401 manager_->GetCompiler()->RecordClassStatus(ref, old_status);
2402 }
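
// Hedged illustration (not from the original source) of the deadlock that the ObjectLock on
// java.lang.Class in TryInitializeClass above avoids: suppose Parent's <clinit> touches Child
// while another thread initializes Child, which must first initialize Parent. Each thread then
// wants the class lock the other already holds (parent-to-child vs child-to-parent ordering),
// so compilation serializes non-trivial initialization through a single shared lock object
// instead of per-class locks.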
2403
2404 private:
2405 void InternStrings(Handle<mirror::Class> klass, Handle<mirror::ClassLoader> class_loader)
2406 REQUIRES_SHARED(Locks::mutator_lock_) {
2407 DCHECK(manager_->GetCompiler()->GetCompilerOptions().IsBootImage());
2408 DCHECK(klass->IsVerified());
2409 DCHECK(!klass->IsInitialized());
2410
2411 StackHandleScope<1> hs(Thread::Current());
2412 Handle<mirror::DexCache> dex_cache = hs.NewHandle(klass->GetDexCache());
2413 const DexFile::ClassDef* class_def = klass->GetClassDef();
2414 ClassLinker* class_linker = manager_->GetClassLinker();
2415
2416 // Check encoded final field values for strings and intern.
2417 annotations::RuntimeEncodedStaticFieldValueIterator value_it(dex_cache,
2418 class_loader,
2419 manager_->GetClassLinker(),
2420 *class_def);
2421 for ( ; value_it.HasNext(); value_it.Next()) {
2422 if (value_it.GetValueType() == annotations::RuntimeEncodedStaticFieldValueIterator::kString) {
2423 // Resolve the string. This will intern the string.
2424 art::ObjPtr<mirror::String> resolved = class_linker->ResolveString(
2425 dex::StringIndex(value_it.GetJavaValue().i), dex_cache);
2426 CHECK(resolved != nullptr);
2427 }
2428 }
2429
2430 // Intern strings seen in <clinit>.
2431 ArtMethod* clinit = klass->FindClassInitializer(class_linker->GetImagePointerSize());
2432 if (clinit != nullptr) {
2433 for (const DexInstructionPcPair& inst : clinit->DexInstructions()) {
2434 if (inst->Opcode() == Instruction::CONST_STRING) {
2435 ObjPtr<mirror::String> s = class_linker->ResolveString(
2436 dex::StringIndex(inst->VRegB_21c()), dex_cache);
2437 CHECK(s != nullptr);
2438 } else if (inst->Opcode() == Instruction::CONST_STRING_JUMBO) {
2439 ObjPtr<mirror::String> s = class_linker->ResolveString(
2440 dex::StringIndex(inst->VRegB_31c()), dex_cache);
2441 CHECK(s != nullptr);
2442 }
2443 }
2444 }
2445 }
2446
2447 bool ResolveTypesOfMethods(Thread* self, ArtMethod* m)
2448 REQUIRES_SHARED(Locks::mutator_lock_) {
2449 // Return value of ResolveReturnType() is discarded because resolve will be done internally.
2450 ObjPtr<mirror::Class> rtn_type = m->ResolveReturnType();
2451 if (rtn_type == nullptr) {
2452 self->ClearException();
2453 return false;
2454 }
2455 const DexFile::TypeList* types = m->GetParameterTypeList();
2456 if (types != nullptr) {
2457 for (uint32_t i = 0; i < types->Size(); ++i) {
2458 dex::TypeIndex param_type_idx = types->GetTypeItem(i).type_idx_;
2459 ObjPtr<mirror::Class> param_type = m->ResolveClassFromTypeIndex(param_type_idx);
2460 if (param_type == nullptr) {
2461 self->ClearException();
2462 return false;
2463 }
2464 }
2465 }
2466 return true;
2467 }
2468
2469 // Pre-resolve types mentioned in all method signatures before starting a transaction,
2470 // since ResolveType doesn't work in transaction mode.
2471 bool PreResolveTypes(Thread* self, const Handle<mirror::Class>& klass)
2472 REQUIRES_SHARED(Locks::mutator_lock_) {
2473 PointerSize pointer_size = manager_->GetClassLinker()->GetImagePointerSize();
2474 for (ArtMethod& m : klass->GetMethods(pointer_size)) {
2475 if (!ResolveTypesOfMethods(self, &m)) {
2476 return false;
2477 }
2478 }
2479 if (klass->IsInterface()) {
2480 return true;
2481 } else if (klass->HasSuperClass()) {
2482 StackHandleScope<1> hs(self);
2483 MutableHandle<mirror::Class> super_klass(hs.NewHandle<mirror::Class>(klass->GetSuperClass()));
2484 for (int i = super_klass->GetVTableLength() - 1; i >= 0; --i) {
2485 ArtMethod* m = klass->GetVTableEntry(i, pointer_size);
2486 ArtMethod* super_m = super_klass->GetVTableEntry(i, pointer_size);
2487 if (!ResolveTypesOfMethods(self, m) || !ResolveTypesOfMethods(self, super_m)) {
2488 return false;
2489 }
2490 }
2491 for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
2492 super_klass.Assign(klass->GetIfTable()->GetInterface(i));
2493 if (klass->GetClassLoader() != super_klass->GetClassLoader()) {
2494 uint32_t num_methods = super_klass->NumVirtualMethods();
2495 for (uint32_t j = 0; j < num_methods; ++j) {
2496 ArtMethod* m = klass->GetIfTable()->GetMethodArray(i)->GetElementPtrSize<ArtMethod*>(
2497 j, pointer_size);
2498 ArtMethod* super_m = super_klass->GetVirtualMethod(j, pointer_size);
2499 if (!ResolveTypesOfMethods(self, m) || !ResolveTypesOfMethods(self, super_m)) {
2500 return false;
2501 }
2502 }
2503 }
2504 }
2505 }
2506 return true;
2507 }
2508
2509 // Initialize the klass's dependencies recursively before initializing the klass itself.
2510 // Checking for interfaces is also necessary since interfaces can contain
2511 // both default methods and static encoded fields.
2512 bool InitializeDependencies(const Handle<mirror::Class>& klass,
2513 Handle<mirror::ClassLoader> class_loader,
2514 Thread* self)
2515 REQUIRES_SHARED(Locks::mutator_lock_) {
2516 if (klass->HasSuperClass()) {
2517 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
2518 StackHandleScope<1> hs(self);
2519 Handle<mirror::Class> handle_scope_super(hs.NewHandle(super_class));
2520 if (!handle_scope_super->IsInitialized()) {
2521 this->TryInitializeClass(handle_scope_super, class_loader);
2522 if (!handle_scope_super->IsInitialized()) {
2523 return false;
2524 }
2525 }
2526 }
2527
2528 uint32_t num_if = klass->NumDirectInterfaces();
2529 for (size_t i = 0; i < num_if; i++) {
2530 ObjPtr<mirror::Class>
2531 interface = mirror::Class::GetDirectInterface(self, klass.Get(), i);
2532 StackHandleScope<1> hs(self);
2533 Handle<mirror::Class> handle_interface(hs.NewHandle(interface));
2534
2535 TryInitializeClass(handle_interface, class_loader);
2536
2537 if (!handle_interface->IsInitialized()) {
2538 return false;
2539 }
2540 }
2541
2542 return PreResolveTypes(self, klass);
2543 }
2544
2545 // In this phase, classes containing class initializers are ignored. Make sure no
2546 // clinit appears in klass's superclass chain or interfaces.
2547 bool NoClinitInDependency(const Handle<mirror::Class>& klass,
2548 Thread* self,
2549 Handle<mirror::ClassLoader>* class_loader)
2550 REQUIRES_SHARED(Locks::mutator_lock_) {
2551 ArtMethod* clinit =
2552 klass->FindClassInitializer(manager_->GetClassLinker()->GetImagePointerSize());
2553 if (clinit != nullptr) {
2554 VLOG(compiler) << klass->PrettyClass() << ' ' << clinit->PrettyMethod(true);
2555 return false;
2556 }
2557 if (klass->HasSuperClass()) {
2558 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
2559 StackHandleScope<1> hs(self);
2560 Handle<mirror::Class> handle_scope_super(hs.NewHandle(super_class));
2561 if (!NoClinitInDependency(handle_scope_super, self, class_loader)) {
2562 return false;
2563 }
2564 }
2565
2566 uint32_t num_if = klass->NumDirectInterfaces();
2567 for (size_t i = 0; i < num_if; i++) {
2568 ObjPtr<mirror::Class>
2569 interface = mirror::Class::GetDirectInterface(self, klass.Get(), i);
2570 StackHandleScope<1> hs(self);
2571 Handle<mirror::Class> handle_interface(hs.NewHandle(interface));
2572 if (!NoClinitInDependency(handle_interface, self, class_loader)) {
2573 return false;
2574 }
2575 }
2576
2577 return true;
2578 }
2579
2580 const ParallelCompilationManager* const manager_;
2581 };
2582
2583 void CompilerDriver::InitializeClasses(jobject jni_class_loader,
2584 const DexFile& dex_file,
2585 const std::vector<const DexFile*>& dex_files,
2586 TimingLogger* timings) {
2587 TimingLogger::ScopedTiming t("InitializeNoClinit", timings);
2588
2589 // Initialization allocates objects and needs to run single-threaded to be deterministic.
2590 bool force_determinism = GetCompilerOptions().IsForceDeterminism();
2591 ThreadPool* init_thread_pool = force_determinism
2592 ? single_thread_pool_.get()
2593 : parallel_thread_pool_.get();
2594 size_t init_thread_count = force_determinism ? 1U : parallel_thread_count_;
2595
2596 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
2597 ParallelCompilationManager context(class_linker, jni_class_loader, this, &dex_file, dex_files,
2598 init_thread_pool);
2599
2600 if (GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsAppImage()) {
2601 // Set the thread count to 1 to support initialization for app images, since transactions
2602 // don't support multithreading yet.
2603 // TODO: remove this when transactional mode supports multithreading.
2604 init_thread_count = 1U;
2605 }
2606 InitializeClassVisitor visitor(&context);
2607 context.ForAll(0, dex_file.NumClassDefs(), &visitor, init_thread_count);
2608 }
2609
2610 class InitializeArrayClassesAndCreateConflictTablesVisitor : public ClassVisitor {
2611 public:
2612 explicit InitializeArrayClassesAndCreateConflictTablesVisitor(VariableSizedHandleScope& hs)
2613 : hs_(hs) {}
2614
2615 virtual bool operator()(ObjPtr<mirror::Class> klass) OVERRIDE
2616 REQUIRES_SHARED(Locks::mutator_lock_) {
2617 if (Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass)) {
2618 return true;
2619 }
2620 if (klass->IsArrayClass()) {
2621 StackHandleScope<1> hs(Thread::Current());
2622 auto h_klass = hs.NewHandleWrapper(&klass);
2623 Runtime::Current()->GetClassLinker()->EnsureInitialized(hs.Self(), h_klass, true, true);
2624 }
2625 // Collect handles since there may be thread suspension in future EnsureInitialized.
2626 to_visit_.push_back(hs_.NewHandle(klass));
2627 return true;
2628 }
2629
2630 void FillAllIMTAndConflictTables() REQUIRES_SHARED(Locks::mutator_lock_) {
2631 for (Handle<mirror::Class> c : to_visit_) {
2632 // Create the conflict tables.
2633 FillIMTAndConflictTables(c.Get());
2634 }
2635 }
2636
2637 private:
2638 void FillIMTAndConflictTables(ObjPtr<mirror::Class> klass)
2639 REQUIRES_SHARED(Locks::mutator_lock_) {
2640 if (!klass->ShouldHaveImt()) {
2641 return;
2642 }
2643 if (visited_classes_.find(klass) != visited_classes_.end()) {
2644 return;
2645 }
2646 if (klass->HasSuperClass()) {
2647 FillIMTAndConflictTables(klass->GetSuperClass());
2648 }
2649 if (!klass->IsTemp()) {
2650 Runtime::Current()->GetClassLinker()->FillIMTAndConflictTables(klass);
2651 }
2652 visited_classes_.insert(klass);
2653 }
2654
2655 VariableSizedHandleScope& hs_;
2656 std::vector<Handle<mirror::Class>> to_visit_;
2657 std::unordered_set<ObjPtr<mirror::Class>, HashObjPtr> visited_classes_;
2658 };
2659
2660 void CompilerDriver::InitializeClasses(jobject class_loader,
2661 const std::vector<const DexFile*>& dex_files,
2662 TimingLogger* timings) {
2663 for (size_t i = 0; i != dex_files.size(); ++i) {
2664 const DexFile* dex_file = dex_files[i];
2665 CHECK(dex_file != nullptr);
2666 InitializeClasses(class_loader, *dex_file, dex_files, timings);
2667 }
2668 if (GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsAppImage()) {
2669 // Make sure that we call EnsureIntiailized on all the array classes to call
2670 // SetVerificationAttempted so that the access flags are set. If we do not do this they get
2671 // changed at runtime resulting in more dirty image pages.
2672 // Also create conflict tables.
2673 // Only useful if we are compiling an image (image_classes_ is not null).
2674 ScopedObjectAccess soa(Thread::Current());
2675 VariableSizedHandleScope hs(soa.Self());
2676 InitializeArrayClassesAndCreateConflictTablesVisitor visitor(hs);
2677 Runtime::Current()->GetClassLinker()->VisitClassesWithoutClassesLock(&visitor);
2678 visitor.FillAllIMTAndConflictTables();
2679 }
2680 if (GetCompilerOptions().IsBootImage()) {
2681 // Prune garbage objects created during aborted transactions.
2682 Runtime::Current()->GetHeap()->CollectGarbage(/* clear_soft_references */ true);
2683 }
2684 }
2685
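// Runs compile_fn over every class_def in dex_file using the given thread pool. The same helper
// serves both compilation passes below: compile_fn is CompileMethodQuick for the main pass and
// CompileMethodDex2Dex for the dex-to-dex (quickening) pass.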
template <typename CompileFn>
static void CompileDexFile(CompilerDriver* driver,
                           jobject class_loader,
                           const DexFile& dex_file,
                           const std::vector<const DexFile*>& dex_files,
                           ThreadPool* thread_pool,
                           size_t thread_count,
                           TimingLogger* timings,
                           const char* timing_name,
                           CompileFn compile_fn) {
  TimingLogger::ScopedTiming t(timing_name, timings);
  ParallelCompilationManager context(Runtime::Current()->GetClassLinker(),
                                     class_loader,
                                     driver,
                                     &dex_file,
                                     dex_files,
                                     thread_pool);

  auto compile = [&context, &compile_fn](size_t class_def_index) {
    ScopedTrace trace(__FUNCTION__);
    const DexFile& dex_file = *context.GetDexFile();
    const DexFile::ClassDef& class_def = dex_file.GetClassDef(class_def_index);
    ClassLinker* class_linker = context.GetClassLinker();
    jobject jclass_loader = context.GetClassLoader();
    ClassReference ref(&dex_file, class_def_index);
    // Skip compiling classes with generic verifier failures since they will still fail at runtime.
    if (context.GetCompiler()->GetVerificationResults()->IsClassRejected(ref)) {
      return;
    }
    // Use a scoped object access to perform the quick SkipClass check.
    const char* descriptor = dex_file.GetClassDescriptor(class_def);
    ScopedObjectAccess soa(Thread::Current());
    StackHandleScope<3> hs(soa.Self());
    Handle<mirror::ClassLoader> class_loader(
        hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader)));
    Handle<mirror::Class> klass(
        hs.NewHandle(class_linker->FindClass(soa.Self(), descriptor, class_loader)));
    Handle<mirror::DexCache> dex_cache;
    if (klass == nullptr) {
      // The class failed to resolve; clear the exception and fall back to the dex cache of the
      // dex file being compiled.
      soa.Self()->AssertPendingException();
      soa.Self()->ClearException();
      dex_cache = hs.NewHandle(class_linker->FindDexCache(soa.Self(), dex_file));
    } else if (SkipClass(jclass_loader, dex_file, klass.Get())) {
      return;
    } else {
      dex_cache = hs.NewHandle(klass->GetDexCache());
    }

    const uint8_t* class_data = dex_file.GetClassData(class_def);
    if (class_data == nullptr) {
      // Empty class, probably a marker interface.
      return;
    }

    // Go to native so that we don't block GC during compilation.
    ScopedThreadSuspension sts(soa.Self(), kNative);

    CompilerDriver* const driver = context.GetCompiler();

    // Can we run the DEX-to-DEX compiler on this class?
    optimizer::DexToDexCompiler::CompilationLevel dex_to_dex_compilation_level =
        GetDexToDexCompilationLevel(soa.Self(), *driver, jclass_loader, dex_file, class_def);

    ClassDataItemIterator it(dex_file, class_data);
    it.SkipAllFields();

    bool compilation_enabled = driver->IsClassToCompile(
        dex_file.StringByTypeIdx(class_def.class_idx_));

    // Compile direct and virtual methods.
    int64_t previous_method_idx = -1;
    while (it.HasNextMethod()) {
      uint32_t method_idx = it.GetMemberIndex();
      if (method_idx == previous_method_idx) {
        // smali can create dex files with two encoded_methods sharing the same method_idx:
        // http://code.google.com/p/smali/issues/detail?id=119
        it.Next();
        continue;
      }
      previous_method_idx = method_idx;
      compile_fn(soa.Self(),
                 driver,
                 it.GetMethodCodeItem(),
                 it.GetMethodAccessFlags(),
                 it.GetMethodInvokeType(class_def),
                 class_def_index,
                 method_idx,
                 class_loader,
                 dex_file,
                 dex_to_dex_compilation_level,
                 compilation_enabled,
                 dex_cache);
      it.Next();
    }
    DCHECK(!it.HasNext());
  };
  context.ForAllLambda(0, dex_file.NumClassDefs(), compile, thread_count);
}

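// Drives the actual compilation: a quick-compile pass runs over every dex file, reclaiming arena
// memory in between, and if any code items were marked for quickening a second dex-to-dex pass
// runs afterwards.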
void CompilerDriver::Compile(jobject class_loader,
                             const std::vector<const DexFile*>& dex_files,
                             TimingLogger* timings) {
  if (kDebugProfileGuidedCompilation) {
    LOG(INFO) << "[ProfileGuidedCompilation] " <<
        ((profile_compilation_info_ == nullptr)
            ? "null"
            : profile_compilation_info_->DumpInfo(&dex_files));
  }

  dex_to_dex_compiler_.ClearState();
  for (const DexFile* dex_file : dex_files) {
    CHECK(dex_file != nullptr);
    CompileDexFile(this,
                   class_loader,
                   *dex_file,
                   dex_files,
                   parallel_thread_pool_.get(),
                   parallel_thread_count_,
                   timings,
                   "Compile Dex File Quick",
                   CompileMethodQuick);
    const ArenaPool* const arena_pool = Runtime::Current()->GetArenaPool();
    const size_t arena_alloc = arena_pool->GetBytesAllocated();
    max_arena_alloc_ = std::max(arena_alloc, max_arena_alloc_);
    Runtime::Current()->ReclaimArenaPoolMemory();
  }

  if (dex_to_dex_compiler_.NumCodeItemsToQuicken(Thread::Current()) > 0u) {
    // TODO: Avoid visiting all of the dex files; it's probably rare that only one of them would
    // have quickened methods, though.
    for (const DexFile* dex_file : dex_files) {
      CompileDexFile(this,
                     class_loader,
                     *dex_file,
                     dex_files,
                     parallel_thread_pool_.get(),
                     parallel_thread_count_,
                     timings,
                     "Compile Dex File Dex2Dex",
                     CompileMethodDex2Dex);
    }
    dex_to_dex_compiler_.ClearState();
  }

  VLOG(compiler) << "Compile: " << GetMemoryUsageString(false);
}

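// Records the result of compiling a single method. Each MethodReference may only be inserted
// once (the surrounding DCHECK/CHECK enforce this), and the non-relative linker patch count is
// accumulated for later queries via GetNonRelativeLinkerPatchCount(). Illustrative usage,
// assuming a compiled_method produced elsewhere:
//   driver->AddCompiledMethod(method_ref, compiled_method, patch_count);
//   CompiledMethod* m = driver->GetCompiledMethod(method_ref);  // m == compiled_method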
void CompilerDriver::AddCompiledMethod(const MethodReference& method_ref,
                                       CompiledMethod* const compiled_method,
                                       size_t non_relative_linker_patch_count) {
  DCHECK(GetCompiledMethod(method_ref) == nullptr) << method_ref.PrettyMethod();
  MethodTable::InsertResult result = compiled_methods_.Insert(method_ref,
                                                              /*expected*/ nullptr,
                                                              compiled_method);
  CHECK(result == MethodTable::kInsertResultSuccess);
  non_relative_linker_patch_count_.FetchAndAddRelaxed(non_relative_linker_patch_count);
  DCHECK(GetCompiledMethod(method_ref) != nullptr) << method_ref.PrettyMethod();
}

CompiledMethod* CompilerDriver::RemoveCompiledMethod(const MethodReference& method_ref) {
  CompiledMethod* ret = nullptr;
  CHECK(compiled_methods_.Remove(method_ref, &ret));
  return ret;
}

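// Class status queries. compiled_classes_ covers the dex files being compiled into the oat file,
// classpath_classes_ covers the (non-boot) classpath; a lookup miss is reported as
// ClassStatus::kNotReady.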
bool CompilerDriver::GetCompiledClass(const ClassReference& ref, ClassStatus* status) const {
  DCHECK(status != nullptr);
  // The table doesn't know if something wasn't inserted. For this case it will return
  // ClassStatus::kNotReady. To handle this, just assume anything we didn't try to verify
  // is not compiled.
  if (!compiled_classes_.Get(ref, status) ||
      *status < ClassStatus::kRetryVerificationAtRuntime) {
    return false;
  }
  return true;
}

ClassStatus CompilerDriver::GetClassStatus(const ClassReference& ref) const {
  ClassStatus status = ClassStatus::kNotReady;
  if (!GetCompiledClass(ref, &status)) {
    classpath_classes_.Get(ref, &status);
  }
  return status;
}

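// Records a new status for a class, keeping the best status seen so far. The insert loop retries,
// presumably because another thread may update the entry concurrently; statuses only ever move
// forward (e.g. a vdex may mark a class verified before it is resolved in this compilation).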
void CompilerDriver::RecordClassStatus(const ClassReference& ref, ClassStatus status) {
  switch (status) {
    case ClassStatus::kErrorResolved:
    case ClassStatus::kErrorUnresolved:
    case ClassStatus::kNotReady:
    case ClassStatus::kResolved:
    case ClassStatus::kRetryVerificationAtRuntime:
    case ClassStatus::kVerified:
    case ClassStatus::kSuperclassValidated:
    case ClassStatus::kInitialized:
      break;  // Expected states.
    default:
      LOG(FATAL) << "Unexpected class status for class "
                 << PrettyDescriptor(
                        ref.dex_file->GetClassDescriptor(ref.dex_file->GetClassDef(ref.index)))
                 << " of " << status;
  }

  ClassStateTable::InsertResult result;
  ClassStateTable* table = &compiled_classes_;
  do {
    ClassStatus existing = ClassStatus::kNotReady;
    if (!table->Get(ref, &existing)) {
      // A classpath class.
      if (kIsDebugBuild) {
        // Check to make sure it's not a dex file for an oat file we are compiling since these
        // should always succeed. These do not include classes from used libraries.
        for (const DexFile* dex_file : GetDexFilesForOatFile()) {
          CHECK_NE(ref.dex_file, dex_file) << ref.dex_file->GetLocation();
        }
      }
      if (!classpath_classes_.HaveDexFile(ref.dex_file)) {
        // Boot classpath dex file.
        return;
      }
      table = &classpath_classes_;
      table->Get(ref, &existing);
    }
    if (existing >= status) {
      // The existing status is already at least as good as the new one; nothing to record.
      break;
    }
    // Update the status if we now have a greater one. This happens with vdex,
    // which records that a class is verified, but does not resolve it.
    result = table->Insert(ref, existing, status);
    CHECK(result != ClassStateTable::kInsertResultInvalidDexFile) << ref.dex_file->GetLocation();
  } while (result != ClassStateTable::kInsertResultSuccess);
}

CompiledMethod* CompilerDriver::GetCompiledMethod(MethodReference ref) const {
  CompiledMethod* compiled_method = nullptr;
  compiled_methods_.Get(ref, &compiled_method);
  return compiled_method;
}

bool CompilerDriver::IsMethodVerifiedWithoutFailures(uint32_t method_idx,
                                                     uint16_t class_def_idx,
                                                     const DexFile& dex_file) const {
  const VerifiedMethod* verified_method = GetVerifiedMethod(&dex_file, method_idx);
  if (verified_method != nullptr) {
    return !verified_method->HasVerificationFailures();
  }

  // If we can't find verification metadata, check if this is a system class (we trust that system
  // classes have their methods verified). If it's not, be conservative and assume the method
  // has not been verified successfully.

  // TODO: When compiling the boot image it should be safe to assume that everything is verified,
  // even if methods are not found in the verification cache.
  const char* descriptor = dex_file.GetClassDescriptor(dex_file.GetClassDef(class_def_idx));
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);
  bool is_system_class = class_linker->FindSystemClass(self, descriptor) != nullptr;
  if (!is_system_class) {
    self->ClearException();
  }
  return is_system_class;
}

size_t CompilerDriver::GetNonRelativeLinkerPatchCount() const {
  return non_relative_linker_patch_count_.LoadRelaxed();
}

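// Cache of "does this class's constructor need a barrier?" answers, keyed by ClassReference. The
// getter uses the usual double-checked pattern: a read lock for the common hit case, then a write
// lock to compute and memoize the answer on a miss.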
void CompilerDriver::SetRequiresConstructorBarrier(Thread* self,
                                                   const DexFile* dex_file,
                                                   uint16_t class_def_index,
                                                   bool requires) {
  WriterMutexLock mu(self, requires_constructor_barrier_lock_);
  requires_constructor_barrier_.emplace(ClassReference(dex_file, class_def_index), requires);
}

bool CompilerDriver::RequiresConstructorBarrier(Thread* self,
                                                const DexFile* dex_file,
                                                uint16_t class_def_index) {
  ClassReference class_ref(dex_file, class_def_index);
  {
    ReaderMutexLock mu(self, requires_constructor_barrier_lock_);
    auto it = requires_constructor_barrier_.find(class_ref);
    if (it != requires_constructor_barrier_.end()) {
      return it->second;
    }
  }
  WriterMutexLock mu(self, requires_constructor_barrier_lock_);
  const bool requires = RequiresConstructorBarrier(*dex_file, class_def_index);
  requires_constructor_barrier_.emplace(class_ref, requires);
  return requires;
}

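// Summarizes compiler memory usage: peak arena allocation, Java heap allocation, and (on bionic
// or glibc) native heap figures from mallinfo(), where uordblks is the space in allocated chunks
// and fordblks is the space in free chunks.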
std::string CompilerDriver::GetMemoryUsageString(bool extended) const {
  std::ostringstream oss;
  const gc::Heap* const heap = Runtime::Current()->GetHeap();
  const size_t java_alloc = heap->GetBytesAllocated();
  oss << "arena alloc=" << PrettySize(max_arena_alloc_) << " (" << max_arena_alloc_ << "B)";
  oss << " java alloc=" << PrettySize(java_alloc) << " (" << java_alloc << "B)";
#if defined(__BIONIC__) || defined(__GLIBC__)
  const struct mallinfo info = mallinfo();
  const size_t allocated_space = static_cast<size_t>(info.uordblks);
  const size_t free_space = static_cast<size_t>(info.fordblks);
  oss << " native alloc=" << PrettySize(allocated_space) << " (" << allocated_space << "B)"
      << " free=" << PrettySize(free_space) << " (" << free_space << "B)";
#endif
  compiled_method_storage_.DumpMemoryUsage(oss, extended);
  return oss.str();
}

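// Cross-dex-file inlining policy: a method may not be inlined into another dex file if its own
// dex file appears on the compiler options' no-inline-from list.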
bool CompilerDriver::MayInlineInternal(const DexFile* inlined_from,
                                       const DexFile* inlined_into) const {
  // Inlining across dex files is not allowed when the inlined-from dex file is on the
  // no-inline-from list.
  if (inlined_from != inlined_into &&
      compiler_options_->GetNoInlineFromDexFile() != nullptr &&
      ContainsElement(*compiler_options_->GetNoInlineFromDexFile(), inlined_from)) {
    return false;
  }

  return true;
}

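// The parallel pool is created with parallel_thread_count_ - 1 workers, presumably because the
// calling thread also participates in ForAll() work; the zero-worker single-threaded pool exists
// for deterministic phases such as class initialization above.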
void CompilerDriver::InitializeThreadPools() {
  size_t parallel_count = parallel_thread_count_ > 0 ? parallel_thread_count_ - 1 : 0;
  parallel_thread_pool_.reset(
      new ThreadPool("Compiler driver thread pool", parallel_count));
  single_thread_pool_.reset(new ThreadPool("Single-threaded Compiler driver thread pool", 0));
}

void CompilerDriver::FreeThreadPools() {
  parallel_thread_pool_.reset();
  single_thread_pool_.reset();
}

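// Registers the dex files to be compiled into the oat file versus those that are only on the
// classpath; each set backs its own class-status table (compiled_classes_ / classpath_classes_).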
void CompilerDriver::SetDexFilesForOatFile(const std::vector<const DexFile*>& dex_files) {
  dex_files_for_oat_file_ = dex_files;
  compiled_classes_.AddDexFiles(dex_files);
  dex_to_dex_compiler_.SetDexFiles(dex_files);
}

void CompilerDriver::SetClasspathDexFiles(const std::vector<const DexFile*>& dex_files) {
  classpath_classes_.AddDexFiles(dex_files);
}

}  // namespace art