1 /*
2  * Copyright (C) 2011 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #include "class_linker.h"
18 
19 #include <unistd.h>
20 
21 #include <algorithm>
22 #include <deque>
23 #include <forward_list>
24 #include <iostream>
25 #include <map>
26 #include <memory>
27 #include <queue>
28 #include <string>
29 #include <string_view>
30 #include <tuple>
31 #include <utility>
32 #include <vector>
33 
34 #include "android-base/stringprintf.h"
35 
36 #include "art_field-inl.h"
37 #include "art_method-inl.h"
38 #include "barrier.h"
39 #include "base/arena_allocator.h"
40 #include "base/casts.h"
41 #include "base/file_utils.h"
42 #include "base/hash_map.h"
43 #include "base/hash_set.h"
44 #include "base/leb128.h"
45 #include "base/logging.h"
46 #include "base/metrics/metrics.h"
47 #include "base/mutex-inl.h"
48 #include "base/os.h"
49 #include "base/quasi_atomic.h"
50 #include "base/scoped_arena_containers.h"
51 #include "base/scoped_flock.h"
52 #include "base/stl_util.h"
53 #include "base/string_view_cpp20.h"
54 #include "base/systrace.h"
55 #include "base/time_utils.h"
56 #include "base/unix_file/fd_file.h"
57 #include "base/utils.h"
58 #include "base/value_object.h"
59 #include "cha.h"
60 #include "class_linker-inl.h"
61 #include "class_loader_utils.h"
62 #include "class_root-inl.h"
63 #include "class_table-inl.h"
64 #include "compiler_callbacks.h"
65 #include "debug_print.h"
66 #include "debugger.h"
67 #include "dex/class_accessor-inl.h"
68 #include "dex/descriptors_names.h"
69 #include "dex/dex_file-inl.h"
70 #include "dex/dex_file_exception_helpers.h"
71 #include "dex/dex_file_loader.h"
72 #include "dex/signature-inl.h"
73 #include "dex/utf.h"
74 #include "entrypoints/entrypoint_utils-inl.h"
75 #include "entrypoints/runtime_asm_entrypoints.h"
76 #include "experimental_flags.h"
77 #include "gc/accounting/card_table-inl.h"
78 #include "gc/accounting/heap_bitmap-inl.h"
79 #include "gc/accounting/space_bitmap-inl.h"
80 #include "gc/heap-visit-objects-inl.h"
81 #include "gc/heap.h"
82 #include "gc/scoped_gc_critical_section.h"
83 #include "gc/space/image_space.h"
84 #include "gc/space/space-inl.h"
85 #include "gc_root-inl.h"
86 #include "handle_scope-inl.h"
87 #include "hidden_api.h"
88 #include "image-inl.h"
89 #include "imt_conflict_table.h"
90 #include "imtable-inl.h"
91 #include "intern_table-inl.h"
92 #include "interpreter/interpreter.h"
93 #include "interpreter/mterp/nterp.h"
94 #include "jit/debugger_interface.h"
95 #include "jit/jit.h"
96 #include "jit/jit_code_cache.h"
97 #include "jni/java_vm_ext.h"
98 #include "jni/jni_internal.h"
99 #include "linear_alloc.h"
100 #include "mirror/array-alloc-inl.h"
101 #include "mirror/array-inl.h"
102 #include "mirror/call_site.h"
103 #include "mirror/class-alloc-inl.h"
104 #include "mirror/class-inl.h"
105 #include "mirror/class.h"
106 #include "mirror/class_ext.h"
107 #include "mirror/class_loader.h"
108 #include "mirror/dex_cache-inl.h"
109 #include "mirror/dex_cache.h"
110 #include "mirror/emulated_stack_frame.h"
111 #include "mirror/field.h"
112 #include "mirror/iftable-inl.h"
113 #include "mirror/method.h"
114 #include "mirror/method_handle_impl.h"
115 #include "mirror/method_handles_lookup.h"
116 #include "mirror/method_type.h"
117 #include "mirror/object-inl.h"
118 #include "mirror/object-refvisitor-inl.h"
119 #include "mirror/object.h"
120 #include "mirror/object_array-alloc-inl.h"
121 #include "mirror/object_array-inl.h"
122 #include "mirror/object_array.h"
123 #include "mirror/object_reference.h"
124 #include "mirror/object_reference-inl.h"
125 #include "mirror/proxy.h"
126 #include "mirror/reference-inl.h"
127 #include "mirror/stack_trace_element.h"
128 #include "mirror/string-inl.h"
129 #include "mirror/throwable.h"
130 #include "mirror/var_handle.h"
131 #include "native/dalvik_system_DexFile.h"
132 #include "nativehelper/scoped_local_ref.h"
133 #include "nterp_helpers.h"
134 #include "oat.h"
135 #include "oat_file-inl.h"
136 #include "oat_file.h"
137 #include "oat_file_assistant.h"
138 #include "oat_file_manager.h"
139 #include "object_lock.h"
140 #include "profile/profile_compilation_info.h"
141 #include "runtime.h"
142 #include "runtime_callbacks.h"
143 #include "scoped_thread_state_change-inl.h"
144 #include "thread-inl.h"
145 #include "thread.h"
146 #include "thread_list.h"
147 #include "trace.h"
148 #include "transaction.h"
149 #include "vdex_file.h"
150 #include "verifier/class_verifier.h"
151 #include "verifier/verifier_deps.h"
152 #include "well_known_classes.h"
153 
154 #include "interpreter/interpreter_mterp_impl.h"
155 
156 namespace art {
157 
158 using android::base::StringPrintf;
159 
160 static constexpr bool kCheckImageObjects = kIsDebugBuild;
161 static constexpr bool kVerifyArtMethodDeclaringClasses = kIsDebugBuild;
162 
163 static void ThrowNoClassDefFoundError(const char* fmt, ...)
164     __attribute__((__format__(__printf__, 1, 2)))
165     REQUIRES_SHARED(Locks::mutator_lock_);
166 static void ThrowNoClassDefFoundError(const char* fmt, ...) {
167   va_list args;
168   va_start(args, fmt);
169   Thread* self = Thread::Current();
170   self->ThrowNewExceptionV("Ljava/lang/NoClassDefFoundError;", fmt, args);
171   va_end(args);
172 }
173 
174 static bool HasInitWithString(Thread* self, ClassLinker* class_linker, const char* descriptor)
175     REQUIRES_SHARED(Locks::mutator_lock_) {
176   ArtMethod* method = self->GetCurrentMethod(nullptr);
177   StackHandleScope<1> hs(self);
178   Handle<mirror::ClassLoader> class_loader(hs.NewHandle(method != nullptr ?
179       method->GetDeclaringClass()->GetClassLoader() : nullptr));
180   ObjPtr<mirror::Class> exception_class = class_linker->FindClass(self, descriptor, class_loader);
181 
182   if (exception_class == nullptr) {
183     // No exception class means no <init>-with-string.
184     CHECK(self->IsExceptionPending());
185     self->ClearException();
186     return false;
187   }
188 
189   ArtMethod* exception_init_method = exception_class->FindConstructor(
190       "(Ljava/lang/String;)V", class_linker->GetImagePointerSize());
191   return exception_init_method != nullptr;
192 }
193 
194 static ObjPtr<mirror::Object> GetVerifyError(ObjPtr<mirror::Class> c)
195     REQUIRES_SHARED(Locks::mutator_lock_) {
196   ObjPtr<mirror::ClassExt> ext(c->GetExtData());
197   if (ext == nullptr) {
198     return nullptr;
199   } else {
200     return ext->GetVerifyError();
201   }
202 }
203 
204 // Helper for ThrowEarlierClassFailure. Throws the stored error.
205 static void HandleEarlierVerifyError(Thread* self,
206                                      ClassLinker* class_linker,
207                                      ObjPtr<mirror::Class> c)
208     REQUIRES_SHARED(Locks::mutator_lock_) {
209   ObjPtr<mirror::Object> obj = GetVerifyError(c);
210   DCHECK(obj != nullptr);
211   self->AssertNoPendingException();
212   if (obj->IsClass()) {
213     // Previous error has been stored as class. Create a new exception of that type.
214 
215     // It's possible the exception doesn't have a <init>(String).
216     std::string temp;
217     const char* descriptor = obj->AsClass()->GetDescriptor(&temp);
218 
219     if (HasInitWithString(self, class_linker, descriptor)) {
220       self->ThrowNewException(descriptor, c->PrettyDescriptor().c_str());
221     } else {
222       self->ThrowNewException(descriptor, nullptr);
223     }
224   } else {
225     // Previous error has been stored as an instance. Just rethrow.
226     ObjPtr<mirror::Class> throwable_class = GetClassRoot<mirror::Throwable>(class_linker);
227     ObjPtr<mirror::Class> error_class = obj->GetClass();
228     CHECK(throwable_class->IsAssignableFrom(error_class));
229     self->SetException(obj->AsThrowable());
230   }
231   self->AssertPendingException();
232 }
233 
234 static void ChangeInterpreterBridgeToNterp(ArtMethod* method, ClassLinker* class_linker)
235     REQUIRES_SHARED(Locks::mutator_lock_) {
236   Runtime* runtime = Runtime::Current();
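  // Only switch methods that are still on the generic quick-to-interpreter bridge and that
  // nterp can actually run; methods with other entrypoints (e.g. compiled code) are left alone.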
237   if (class_linker->IsQuickToInterpreterBridge(method->GetEntryPointFromQuickCompiledCode()) &&
238       CanMethodUseNterp(method)) {
239     if (method->GetDeclaringClass()->IsVisiblyInitialized() ||
240         !NeedsClinitCheckBeforeCall(method)) {
241       runtime->GetInstrumentation()->UpdateMethodsCode(method, interpreter::GetNterpEntryPoint());
242     } else {
243       // Install the resolution stub, which will initialize the class and then
244       // call the method with nterp.
245       runtime->GetInstrumentation()->UpdateMethodsCode(method, GetQuickResolutionStub());
246     }
247   }
248 }
249 
250 // Ensures that methods have the kAccSkipAccessChecks bit set. We use the
251 // kAccVerificationAttempted bit on the class access flags to determine whether this has been done
252 // before.
253 static void EnsureSkipAccessChecksMethods(Handle<mirror::Class> klass, PointerSize pointer_size)
254     REQUIRES_SHARED(Locks::mutator_lock_) {
255   Runtime* runtime = Runtime::Current();
256   ClassLinker* class_linker = runtime->GetClassLinker();
257   if (!klass->WasVerificationAttempted()) {
258     klass->SetSkipAccessChecksFlagOnAllMethods(pointer_size);
259     klass->SetVerificationAttempted();
260     // Now that the class has passed verification, try to set nterp entrypoints
261     // to methods that currently use the switch interpreter.
262     if (interpreter::CanRuntimeUseNterp()) {
263       for (ArtMethod& m : klass->GetMethods(pointer_size)) {
264         ChangeInterpreterBridgeToNterp(&m, class_linker);
265       }
266     }
267   }
268 }
269 
270 // Callback responsible for making a batch of classes visibly initialized
271 // after all threads have called it from a checkpoint, ensuring visibility.
272 class ClassLinker::VisiblyInitializedCallback final
273     : public Closure, public IntrusiveForwardListNode<VisiblyInitializedCallback> {
274  public:
275   explicit VisiblyInitializedCallback(ClassLinker* class_linker)
276       : class_linker_(class_linker),
277         num_classes_(0u),
278         thread_visibility_counter_(0),
279         barriers_() {
280     std::fill_n(classes_, kMaxClasses, nullptr);
281   }
282 
283   bool IsEmpty() const {
284     DCHECK_LE(num_classes_, kMaxClasses);
285     return num_classes_ == 0u;
286   }
287 
288   bool IsFull() const {
289     DCHECK_LE(num_classes_, kMaxClasses);
290     return num_classes_ == kMaxClasses;
291   }
292 
293   void AddClass(Thread* self, ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
294     DCHECK_EQ(klass->GetStatus(), ClassStatus::kInitialized);
295     DCHECK(!IsFull());
296     classes_[num_classes_] = self->GetJniEnv()->GetVm()->AddWeakGlobalRef(self, klass);
297     ++num_classes_;
298   }
299 
300   void AddBarrier(Barrier* barrier) {
301     barriers_.push_front(barrier);
302   }
303 
304   std::forward_list<Barrier*> GetAndClearBarriers() {
305     std::forward_list<Barrier*> result;
306     result.swap(barriers_);
307     result.reverse();  // Return barriers in insertion order.
308     return result;
309   }
310 
311   void MakeVisible(Thread* self) {
312     DCHECK_EQ(thread_visibility_counter_.load(std::memory_order_relaxed), 0);
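    // RunCheckpoint() returns the number of threads that will run this closure; the counter
    // reaches zero (and the classes become visibly initialized) only after each of those
    // threads has executed Run().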
313     size_t count = Runtime::Current()->GetThreadList()->RunCheckpoint(this);
314     AdjustThreadVisibilityCounter(self, count);
315   }
316 
317   void Run(Thread* self) override {
318     self->ClearMakeVisiblyInitializedCounter();
319     AdjustThreadVisibilityCounter(self, -1);
320   }
321 
322  private:
323   void AdjustThreadVisibilityCounter(Thread* self, ssize_t adjustment) {
324     ssize_t old = thread_visibility_counter_.fetch_add(adjustment, std::memory_order_relaxed);
325     if (old + adjustment == 0) {
326       // All threads passed the checkpoint. Mark classes as visibly initialized.
327       {
328         ScopedObjectAccess soa(self);
329         StackHandleScope<1u> hs(self);
330         MutableHandle<mirror::Class> klass = hs.NewHandle<mirror::Class>(nullptr);
331         JavaVMExt* vm = self->GetJniEnv()->GetVm();
332         for (size_t i = 0, num = num_classes_; i != num; ++i) {
333           klass.Assign(ObjPtr<mirror::Class>::DownCast(self->DecodeJObject(classes_[i])));
334           vm->DeleteWeakGlobalRef(self, classes_[i]);
335           if (klass != nullptr) {
336             mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
337             class_linker_->FixupStaticTrampolines(self, klass.Get());
338           }
339         }
340         num_classes_ = 0u;
341       }
342       class_linker_->VisiblyInitializedCallbackDone(self, this);
343     }
344   }
345 
346   static constexpr size_t kMaxClasses = 16;
347 
348   ClassLinker* const class_linker_;
349   size_t num_classes_;
350   jweak classes_[kMaxClasses];
351 
352   // The thread visibility counter starts at 0 and is incremented by the number of
353   // threads that need to run this callback (by the thread that requests the callback
354   // to be run) and decremented once for each `Run()` execution. When it reaches 0,
355   // whether after the increment or after a decrement, we know that `Run()` was executed
356   // for all threads and therefore we can mark the classes as visibly initialized.
357   std::atomic<ssize_t> thread_visibility_counter_;
358 
359   // List of barriers to `Pass()` for threads that wait for the callback to complete.
360   std::forward_list<Barrier*> barriers_;
361 };
362 
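// A sketch of how the pieces above fit together: MarkClassInitialized() batches newly
// initialized classes into `visibly_initialized_callback_`; once a batch is full (or a caller
// asks explicitly), the callback is detached and run as a thread checkpoint via MakeVisible(),
// and the last thread through AdjustThreadVisibilityCounter() promotes the batched classes to
// ClassStatus::kVisiblyInitialized.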
363 void ClassLinker::MakeInitializedClassesVisiblyInitialized(Thread* self, bool wait) {
364   if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
365     return;  // Nothing to do. Thanks to the x86 memory model, classes skip the initialized status.
366   }
367   std::optional<Barrier> maybe_barrier;  // Avoid constructing the Barrier for `wait == false`.
368   if (wait) {
369     maybe_barrier.emplace(0);
370   }
371   int wait_count = 0;
372   VisiblyInitializedCallback* callback = nullptr;
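  // Under the lock, detach the currently filling callback (if any) so it can be run outside
  // the lock; when waiting, also register a barrier with every in-flight callback.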
373   {
374     MutexLock lock(self, visibly_initialized_callback_lock_);
375     if (visibly_initialized_callback_ != nullptr && !visibly_initialized_callback_->IsEmpty()) {
376       callback = visibly_initialized_callback_.release();
377       running_visibly_initialized_callbacks_.push_front(*callback);
378     }
379     if (wait) {
380       DCHECK(maybe_barrier.has_value());
381       Barrier* barrier = std::addressof(*maybe_barrier);
382       for (VisiblyInitializedCallback& cb : running_visibly_initialized_callbacks_) {
383         cb.AddBarrier(barrier);
384         ++wait_count;
385       }
386     }
387   }
388   if (callback != nullptr) {
389     callback->MakeVisible(self);
390   }
391   if (wait_count != 0) {
392     DCHECK(maybe_barrier.has_value());
393     maybe_barrier->Increment(self, wait_count);
394   }
395 }
396 
397 void ClassLinker::VisiblyInitializedCallbackDone(Thread* self,
398                                                  VisiblyInitializedCallback* callback) {
399   MutexLock lock(self, visibly_initialized_callback_lock_);
400   // Pass the barriers if requested.
401   for (Barrier* barrier : callback->GetAndClearBarriers()) {
402     barrier->Pass(self);
403   }
404   // Remove the callback from the list of running callbacks.
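  // std::forward_list only supports erase_after(), so walk the list to find the node that
  // precedes `callback`.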
405   auto before = running_visibly_initialized_callbacks_.before_begin();
406   auto it = running_visibly_initialized_callbacks_.begin();
407   DCHECK(it != running_visibly_initialized_callbacks_.end());
408   while (std::addressof(*it) != callback) {
409     before = it;
410     ++it;
411     DCHECK(it != running_visibly_initialized_callbacks_.end());
412   }
413   running_visibly_initialized_callbacks_.erase_after(before);
414   // Reuse or destroy the callback object.
415   if (visibly_initialized_callback_ == nullptr) {
416     visibly_initialized_callback_.reset(callback);
417   } else {
418     delete callback;
419   }
420 }
421 
422 void ClassLinker::ForceClassInitialized(Thread* self, Handle<mirror::Class> klass) {
423   ClassLinker::VisiblyInitializedCallback* cb = MarkClassInitialized(self, klass);
424   if (cb != nullptr) {
425     cb->MakeVisible(self);
426   }
427   ScopedThreadSuspension sts(self, ThreadState::kSuspended);
428   MakeInitializedClassesVisiblyInitialized(self, /*wait=*/true);
429 }
430 
431 ClassLinker::VisiblyInitializedCallback* ClassLinker::MarkClassInitialized(
432     Thread* self, Handle<mirror::Class> klass) {
433   if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
434     // Thanks to the x86 memory model, we do not need any memory fences and
435     // we can immediately mark the class as visibly initialized.
436     mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
437     FixupStaticTrampolines(self, klass.Get());
438     return nullptr;
439   }
440   if (Runtime::Current()->IsActiveTransaction()) {
441     // Transactions are single-threaded, so we can mark the class as visibly initialized.
442     // (Otherwise we'd need to track the callback's entry in the transaction for rollback.)
443     mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
444     FixupStaticTrampolines(self, klass.Get());
445     return nullptr;
446   }
447   mirror::Class::SetStatus(klass, ClassStatus::kInitialized, self);
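  // Record the class in the pending batch; a later checkpoint (see
  // MakeInitializedClassesVisiblyInitialized) will promote it to kVisiblyInitialized.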
448   MutexLock lock(self, visibly_initialized_callback_lock_);
449   if (visibly_initialized_callback_ == nullptr) {
450     visibly_initialized_callback_.reset(new VisiblyInitializedCallback(this));
451   }
452   DCHECK(!visibly_initialized_callback_->IsFull());
453   visibly_initialized_callback_->AddClass(self, klass.Get());
454 
455   if (visibly_initialized_callback_->IsFull()) {
456     VisiblyInitializedCallback* callback = visibly_initialized_callback_.release();
457     running_visibly_initialized_callbacks_.push_front(*callback);
458     return callback;
459   } else {
460     return nullptr;
461   }
462 }
463 
464 const void* ClassLinker::RegisterNative(
465     Thread* self, ArtMethod* method, const void* native_method) {
466   CHECK(method->IsNative()) << method->PrettyMethod();
467   CHECK(native_method != nullptr) << method->PrettyMethod();
468   void* new_native_method = nullptr;
469   Runtime* runtime = Runtime::Current();
470   runtime->GetRuntimeCallbacks()->RegisterNativeMethod(method,
471                                                        native_method,
472                                                        /*out*/&new_native_method);
473   if (method->IsCriticalNative()) {
474     MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
475     // Remove old registered method if any.
476     auto it = critical_native_code_with_clinit_check_.find(method);
477     if (it != critical_native_code_with_clinit_check_.end()) {
478       critical_native_code_with_clinit_check_.erase(it);
479     }
480     // To ensure correct memory visibility, we need the class to be visibly
481     // initialized before we can set the JNI entrypoint.
482     if (method->GetDeclaringClass()->IsVisiblyInitialized()) {
483       method->SetEntryPointFromJni(new_native_method);
484     } else {
485       critical_native_code_with_clinit_check_.emplace(method, new_native_method);
486     }
487   } else {
488     method->SetEntryPointFromJni(new_native_method);
489   }
490   return new_native_method;
491 }
492 
493 void ClassLinker::UnregisterNative(Thread* self, ArtMethod* method) {
494   CHECK(method->IsNative()) << method->PrettyMethod();
495   // Restore stub to lookup native pointer via dlsym.
496   if (method->IsCriticalNative()) {
497     MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
498     auto it = critical_native_code_with_clinit_check_.find(method);
499     if (it != critical_native_code_with_clinit_check_.end()) {
500       critical_native_code_with_clinit_check_.erase(it);
501     }
502     method->SetEntryPointFromJni(GetJniDlsymLookupCriticalStub());
503   } else {
504     method->SetEntryPointFromJni(GetJniDlsymLookupStub());
505   }
506 }
507 
508 const void* ClassLinker::GetRegisteredNative(Thread* self, ArtMethod* method) {
509   if (method->IsCriticalNative()) {
510     MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
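    // RegisterNative() stashes the native code of @CriticalNative methods here while their
    // class is not yet visibly initialized (see above), so consult the map first.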
511     auto it = critical_native_code_with_clinit_check_.find(method);
512     if (it != critical_native_code_with_clinit_check_.end()) {
513       return it->second;
514     }
515     const void* native_code = method->GetEntryPointFromJni();
516     return IsJniDlsymLookupCriticalStub(native_code) ? nullptr : native_code;
517   } else {
518     const void* native_code = method->GetEntryPointFromJni();
519     return IsJniDlsymLookupStub(native_code) ? nullptr : native_code;
520   }
521 }
522 
523 void ClassLinker::ThrowEarlierClassFailure(ObjPtr<mirror::Class> c,
524                                            bool wrap_in_no_class_def,
525                                            bool log) {
526   // The class failed to initialize on a previous attempt, so we want to throw
527   // a NoClassDefFoundError (v2 2.17.5).  The exception to this rule is if we
528   // failed in verification, in which case v2 5.4.1 says we need to re-throw
529   // the previous error.
530   Runtime* const runtime = Runtime::Current();
531   if (!runtime->IsAotCompiler()) {  // Give info if this occurs at runtime.
532     std::string extra;
533     ObjPtr<mirror::Object> verify_error = GetVerifyError(c);
534     if (verify_error != nullptr) {
535       if (verify_error->IsClass()) {
536         extra = mirror::Class::PrettyDescriptor(verify_error->AsClass());
537       } else {
538         extra = verify_error->AsThrowable()->Dump();
539       }
540     }
541     if (log) {
542       LOG(INFO) << "Rejecting re-init on previously-failed class " << c->PrettyClass()
543                 << ": " << extra;
544     }
545   }
546 
547   CHECK(c->IsErroneous()) << c->PrettyClass() << " " << c->GetStatus();
548   Thread* self = Thread::Current();
549   if (runtime->IsAotCompiler()) {
550     // At compile time, accurate errors and NCDFE are disabled to speed compilation.
551     ObjPtr<mirror::Throwable> pre_allocated = runtime->GetPreAllocatedNoClassDefFoundError();
552     self->SetException(pre_allocated);
553   } else {
554     ObjPtr<mirror::Object> verify_error = GetVerifyError(c);
555     if (verify_error != nullptr) {
556       // Rethrow stored error.
557       HandleEarlierVerifyError(self, this, c);
558     }
559     // TODO: This might be wrong if we hit an OOME while allocating the ClassExt. In that case we
560     // might have meant to go down the earlier if statement with the original error, but it got
561     // swallowed by the OOM, so we end up here.
562     if (verify_error == nullptr || wrap_in_no_class_def) {
563       // If there isn't a recorded earlier error, or this is a repeat throw from initialization,
564       // the top-level exception must be a NoClassDefFoundError. The potentially already pending
565       // exception will be a cause.
566       self->ThrowNewWrappedException("Ljava/lang/NoClassDefFoundError;",
567                                      c->PrettyDescriptor().c_str());
568     }
569   }
570 }
571 
572 static void VlogClassInitializationFailure(Handle<mirror::Class> klass)
573     REQUIRES_SHARED(Locks::mutator_lock_) {
574   if (VLOG_IS_ON(class_linker)) {
575     std::string temp;
576     LOG(INFO) << "Failed to initialize class " << klass->GetDescriptor(&temp) << " from "
577               << klass->GetLocation() << "\n" << Thread::Current()->GetException()->Dump();
578   }
579 }
580 
581 static void WrapExceptionInInitializer(Handle<mirror::Class> klass)
582     REQUIRES_SHARED(Locks::mutator_lock_) {
583   Thread* self = Thread::Current();
584   JNIEnv* env = self->GetJniEnv();
585 
586   ScopedLocalRef<jthrowable> cause(env, env->ExceptionOccurred());
587   CHECK(cause.get() != nullptr);
588 
589   // Boot classpath classes should not fail initialization. This is a consistency debug check.
590   // This cannot in general be guaranteed, but in all likelihood leads to breakage down the line.
591   if (klass->GetClassLoader() == nullptr && !Runtime::Current()->IsAotCompiler()) {
592     std::string tmp;
593     // We want to LOG(FATAL) on debug builds since this really shouldn't be happening, but only
594     // if there are no AsyncExceptions being thrown around, since those could have caused the
595     // error.
596     bool known_impossible = kIsDebugBuild && !Runtime::Current()->AreAsyncExceptionsThrown();
597     LOG(known_impossible ? FATAL : WARNING) << klass->GetDescriptor(&tmp)
598                                             << " failed initialization: "
599                                             << self->GetException()->Dump();
600   }
601 
602   env->ExceptionClear();
603   bool is_error = env->IsInstanceOf(cause.get(), WellKnownClasses::java_lang_Error);
604   env->Throw(cause.get());
605 
606   // We only wrap non-Error exceptions; an Error can just be used as-is.
607   if (!is_error) {
608     self->ThrowNewWrappedException("Ljava/lang/ExceptionInInitializerError;", nullptr);
609   }
610   VlogClassInitializationFailure(klass);
611 }
612 
613 ClassLinker::ClassLinker(InternTable* intern_table, bool fast_class_not_found_exceptions)
614     : boot_class_table_(new ClassTable()),
615       failed_dex_cache_class_lookups_(0),
616       class_roots_(nullptr),
617       find_array_class_cache_next_victim_(0),
618       init_done_(false),
619       log_new_roots_(false),
620       intern_table_(intern_table),
621       fast_class_not_found_exceptions_(fast_class_not_found_exceptions),
622       jni_dlsym_lookup_trampoline_(nullptr),
623       jni_dlsym_lookup_critical_trampoline_(nullptr),
624       quick_resolution_trampoline_(nullptr),
625       quick_imt_conflict_trampoline_(nullptr),
626       quick_generic_jni_trampoline_(nullptr),
627       quick_to_interpreter_bridge_trampoline_(nullptr),
628       nterp_trampoline_(nullptr),
629       image_pointer_size_(kRuntimePointerSize),
630       visibly_initialized_callback_lock_("visibly initialized callback lock"),
631       visibly_initialized_callback_(nullptr),
632       critical_native_code_with_clinit_check_lock_("critical native code with clinit check lock"),
633       critical_native_code_with_clinit_check_(),
634       cha_(Runtime::Current()->IsAotCompiler() ? nullptr : new ClassHierarchyAnalysis()) {
635   // For CHA disabled during Aot, see b/34193647.
636 
637   CHECK(intern_table_ != nullptr);
638   static_assert(kFindArrayCacheSize == arraysize(find_array_class_cache_),
639                 "Array cache size wrong.");
640   std::fill_n(find_array_class_cache_, kFindArrayCacheSize, GcRoot<mirror::Class>(nullptr));
641 }
642 
643 void ClassLinker::CheckSystemClass(Thread* self, Handle<mirror::Class> c1, const char* descriptor) {
644   ObjPtr<mirror::Class> c2 = FindSystemClass(self, descriptor);
645   if (c2 == nullptr) {
646     LOG(FATAL) << "Could not find class " << descriptor;
647     UNREACHABLE();
648   }
649   if (c1.Get() != c2) {
650     std::ostringstream os1, os2;
651     c1->DumpClass(os1, mirror::Class::kDumpClassFullDetail);
652     c2->DumpClass(os2, mirror::Class::kDumpClassFullDetail);
653     LOG(FATAL) << "InitWithoutImage: Class mismatch for " << descriptor
654                << ". This is most likely the result of a broken build. Make sure that "
655                << "libcore and art projects match.\n\n"
656                << os1.str() << "\n\n" << os2.str();
657     UNREACHABLE();
658   }
659 }
660 
661 bool ClassLinker::InitWithoutImage(std::vector<std::unique_ptr<const DexFile>> boot_class_path,
662                                    std::string* error_msg) {
663   VLOG(startup) << "ClassLinker::Init";
664 
665   Thread* const self = Thread::Current();
666   Runtime* const runtime = Runtime::Current();
667   gc::Heap* const heap = runtime->GetHeap();
668 
669   CHECK(!heap->HasBootImageSpace()) << "Runtime has image. We should use it.";
670   CHECK(!init_done_);
671 
672   // Use the pointer size from the runtime since we are probably creating the image.
673   image_pointer_size_ = InstructionSetPointerSize(runtime->GetInstructionSet());
674 
675   // java_lang_Class comes first; it's needed for AllocClass
676   // The GC can't handle an object with a null class since we can't get the size of this object.
677   heap->IncrementDisableMovingGC(self);
678   StackHandleScope<64> hs(self);  // 64 is picked arbitrarily.
679   auto class_class_size = mirror::Class::ClassClassSize(image_pointer_size_);
680   // Allocate the object as non-movable so that there are no cases where Object::IsClass returns
681   // the incorrect result when comparing to-space vs from-space.
682   Handle<mirror::Class> java_lang_Class(hs.NewHandle(ObjPtr<mirror::Class>::DownCast(
683       heap->AllocNonMovableObject(self, nullptr, class_class_size, VoidFunctor()))));
684   CHECK(java_lang_Class != nullptr);
685   java_lang_Class->SetClassFlags(mirror::kClassFlagClass);
686   java_lang_Class->SetClass(java_lang_Class.Get());
687   if (kUseBakerReadBarrier) {
688     java_lang_Class->AssertReadBarrierState();
689   }
690   java_lang_Class->SetClassSize(class_class_size);
691   java_lang_Class->SetPrimitiveType(Primitive::kPrimNot);
692   heap->DecrementDisableMovingGC(self);
693   // AllocClass(ObjPtr<mirror::Class>) can now be used
694 
695   // Class[] is used for reflection support.
696   auto class_array_class_size = mirror::ObjectArray<mirror::Class>::ClassSize(image_pointer_size_);
697   Handle<mirror::Class> class_array_class(hs.NewHandle(
698       AllocClass(self, java_lang_Class.Get(), class_array_class_size)));
699   class_array_class->SetComponentType(java_lang_Class.Get());
700 
701   // java_lang_Object comes next so that object_array_class can be created.
702   Handle<mirror::Class> java_lang_Object(hs.NewHandle(
703       AllocClass(self, java_lang_Class.Get(), mirror::Object::ClassSize(image_pointer_size_))));
704   CHECK(java_lang_Object != nullptr);
705   // backfill Object as the super class of Class.
706   java_lang_Class->SetSuperClass(java_lang_Object.Get());
707   mirror::Class::SetStatus(java_lang_Object, ClassStatus::kLoaded, self);
708 
709   java_lang_Object->SetObjectSize(sizeof(mirror::Object));
710   // Allocate in non-movable so that it's possible to check if a JNI weak global ref has been
711   // cleared without triggering the read barrier and unintentionally marking the sentinel alive.
712   runtime->SetSentinel(heap->AllocNonMovableObject(self,
713                                                    java_lang_Object.Get(),
714                                                    java_lang_Object->GetObjectSize(),
715                                                    VoidFunctor()));
716 
717   // Initialize the SubtypeCheck bitstring for java.lang.Object and java.lang.Class.
718   if (kBitstringSubtypeCheckEnabled) {
719     // It might seem the lock here is unnecessary, however all the SubtypeCheck
720     // functions are annotated to require locks all the way down.
721     //
722     // We take the lock here to avoid using NO_THREAD_SAFETY_ANALYSIS.
723     MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
724     SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(java_lang_Object.Get());
725     SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(java_lang_Class.Get());
726   }
727 
728   // Object[] next to hold class roots.
729   Handle<mirror::Class> object_array_class(hs.NewHandle(
730       AllocClass(self, java_lang_Class.Get(),
731                  mirror::ObjectArray<mirror::Object>::ClassSize(image_pointer_size_))));
732   object_array_class->SetComponentType(java_lang_Object.Get());
733 
734   // Setup java.lang.String.
735   //
736   // We make this class non-movable for the unlikely case where it would be
737   // moved by a sticky-bit (minor) collection when using the Generational
738   // Concurrent Copying (CC) collector, potentially creating a stale reference
739   // in the `klass_` field of one of its instances allocated in the Large-Object
740   // Space (LOS) -- see the comment about the dirty card scanning logic in
741   // art::gc::collector::ConcurrentCopying::MarkingPhase.
742   Handle<mirror::Class> java_lang_String(hs.NewHandle(
743       AllocClass</* kMovable= */ false>(
744           self, java_lang_Class.Get(), mirror::String::ClassSize(image_pointer_size_))));
745   java_lang_String->SetStringClass();
746   mirror::Class::SetStatus(java_lang_String, ClassStatus::kResolved, self);
747 
748   // Setup java.lang.ref.Reference.
749   Handle<mirror::Class> java_lang_ref_Reference(hs.NewHandle(
750       AllocClass(self, java_lang_Class.Get(), mirror::Reference::ClassSize(image_pointer_size_))));
751   java_lang_ref_Reference->SetObjectSize(mirror::Reference::InstanceSize());
752   mirror::Class::SetStatus(java_lang_ref_Reference, ClassStatus::kResolved, self);
753 
754   // Create storage for root classes, save away our work so far (requires descriptors).
755   class_roots_ = GcRoot<mirror::ObjectArray<mirror::Class>>(
756       mirror::ObjectArray<mirror::Class>::Alloc(self,
757                                                 object_array_class.Get(),
758                                                 static_cast<int32_t>(ClassRoot::kMax)));
759   CHECK(!class_roots_.IsNull());
760   SetClassRoot(ClassRoot::kJavaLangClass, java_lang_Class.Get());
761   SetClassRoot(ClassRoot::kJavaLangObject, java_lang_Object.Get());
762   SetClassRoot(ClassRoot::kClassArrayClass, class_array_class.Get());
763   SetClassRoot(ClassRoot::kObjectArrayClass, object_array_class.Get());
764   SetClassRoot(ClassRoot::kJavaLangString, java_lang_String.Get());
765   SetClassRoot(ClassRoot::kJavaLangRefReference, java_lang_ref_Reference.Get());
766 
767   // Fill in the empty iftable. Needs to be done after the kObjectArrayClass root is set.
768   java_lang_Object->SetIfTable(AllocIfTable(self, 0));
769 
770   // Create array interface entries to populate once we can load system classes.
771   object_array_class->SetIfTable(AllocIfTable(self, 2));
772   DCHECK_EQ(GetArrayIfTable(), object_array_class->GetIfTable());
773 
774   // Setup the primitive type classes.
775   CreatePrimitiveClass(self, Primitive::kPrimBoolean, ClassRoot::kPrimitiveBoolean);
776   CreatePrimitiveClass(self, Primitive::kPrimByte, ClassRoot::kPrimitiveByte);
777   CreatePrimitiveClass(self, Primitive::kPrimChar, ClassRoot::kPrimitiveChar);
778   CreatePrimitiveClass(self, Primitive::kPrimShort, ClassRoot::kPrimitiveShort);
779   CreatePrimitiveClass(self, Primitive::kPrimInt, ClassRoot::kPrimitiveInt);
780   CreatePrimitiveClass(self, Primitive::kPrimLong, ClassRoot::kPrimitiveLong);
781   CreatePrimitiveClass(self, Primitive::kPrimFloat, ClassRoot::kPrimitiveFloat);
782   CreatePrimitiveClass(self, Primitive::kPrimDouble, ClassRoot::kPrimitiveDouble);
783   CreatePrimitiveClass(self, Primitive::kPrimVoid, ClassRoot::kPrimitiveVoid);
784 
785   // Allocate the primitive array classes. We need only the native pointer
786   // array at this point (int[] or long[], depending on architecture) but
787   // we shall perform the same setup steps for all primitive array classes.
788   AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveBoolean, ClassRoot::kBooleanArrayClass);
789   AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveByte, ClassRoot::kByteArrayClass);
790   AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveChar, ClassRoot::kCharArrayClass);
791   AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveShort, ClassRoot::kShortArrayClass);
792   AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveInt, ClassRoot::kIntArrayClass);
793   AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveLong, ClassRoot::kLongArrayClass);
794   AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveFloat, ClassRoot::kFloatArrayClass);
795   AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveDouble, ClassRoot::kDoubleArrayClass);
796 
797   // now that these are registered, we can use AllocClass() and AllocObjectArray
798 
799   // Set up DexCache. This cannot be done later since AppendToBootClassPath calls AllocDexCache.
800   Handle<mirror::Class> java_lang_DexCache(hs.NewHandle(
801       AllocClass(self, java_lang_Class.Get(), mirror::DexCache::ClassSize(image_pointer_size_))));
802   SetClassRoot(ClassRoot::kJavaLangDexCache, java_lang_DexCache.Get());
803   java_lang_DexCache->SetDexCacheClass();
804   java_lang_DexCache->SetObjectSize(mirror::DexCache::InstanceSize());
805   mirror::Class::SetStatus(java_lang_DexCache, ClassStatus::kResolved, self);
806 
807 
808   // Setup dalvik.system.ClassExt
809   Handle<mirror::Class> dalvik_system_ClassExt(hs.NewHandle(
810       AllocClass(self, java_lang_Class.Get(), mirror::ClassExt::ClassSize(image_pointer_size_))));
811   SetClassRoot(ClassRoot::kDalvikSystemClassExt, dalvik_system_ClassExt.Get());
812   mirror::Class::SetStatus(dalvik_system_ClassExt, ClassStatus::kResolved, self);
813 
814   // Set up array classes for string, field, method
815   Handle<mirror::Class> object_array_string(hs.NewHandle(
816       AllocClass(self, java_lang_Class.Get(),
817                  mirror::ObjectArray<mirror::String>::ClassSize(image_pointer_size_))));
818   object_array_string->SetComponentType(java_lang_String.Get());
819   SetClassRoot(ClassRoot::kJavaLangStringArrayClass, object_array_string.Get());
820 
821   LinearAlloc* linear_alloc = runtime->GetLinearAlloc();
822   // Create runtime resolution and imt conflict methods.
823   runtime->SetResolutionMethod(runtime->CreateResolutionMethod());
824   runtime->SetImtConflictMethod(runtime->CreateImtConflictMethod(linear_alloc));
825   runtime->SetImtUnimplementedMethod(runtime->CreateImtConflictMethod(linear_alloc));
826 
827   // Setup boot_class_path_ and register class_path now that we can use AllocObjectArray to create
828   // DexCache instances. Needs to be after String, Field, Method arrays since AllocDexCache uses
829   // these roots.
830   if (boot_class_path.empty()) {
831     *error_msg = "Boot classpath is empty.";
832     return false;
833   }
834   for (auto& dex_file : boot_class_path) {
835     if (dex_file == nullptr) {
836       *error_msg = "Null dex file.";
837       return false;
838     }
839     AppendToBootClassPath(self, dex_file.get());
840     boot_dex_files_.push_back(std::move(dex_file));
841   }
842 
843   // now we can use FindSystemClass
844 
845   // Set up GenericJNI entrypoint. That is mainly a hack for common_compiler_test.h so that
846   // we do not need friend classes or a publicly exposed setter.
847   quick_generic_jni_trampoline_ = GetQuickGenericJniStub();
848   if (!runtime->IsAotCompiler()) {
849     // We need to set up the generic trampolines since we don't have an image.
850     jni_dlsym_lookup_trampoline_ = GetJniDlsymLookupStub();
851     jni_dlsym_lookup_critical_trampoline_ = GetJniDlsymLookupCriticalStub();
852     quick_resolution_trampoline_ = GetQuickResolutionStub();
853     quick_imt_conflict_trampoline_ = GetQuickImtConflictStub();
854     quick_generic_jni_trampoline_ = GetQuickGenericJniStub();
855     quick_to_interpreter_bridge_trampoline_ = GetQuickToInterpreterBridge();
856     nterp_trampoline_ = interpreter::GetNterpEntryPoint();
857   }
858 
859   // Object, String, ClassExt and DexCache need to be rerun through FindSystemClass to finish init
860   mirror::Class::SetStatus(java_lang_Object, ClassStatus::kNotReady, self);
861   CheckSystemClass(self, java_lang_Object, "Ljava/lang/Object;");
862   CHECK_EQ(java_lang_Object->GetObjectSize(), mirror::Object::InstanceSize());
863   mirror::Class::SetStatus(java_lang_String, ClassStatus::kNotReady, self);
864   CheckSystemClass(self, java_lang_String, "Ljava/lang/String;");
865   mirror::Class::SetStatus(java_lang_DexCache, ClassStatus::kNotReady, self);
866   CheckSystemClass(self, java_lang_DexCache, "Ljava/lang/DexCache;");
867   CHECK_EQ(java_lang_DexCache->GetObjectSize(), mirror::DexCache::InstanceSize());
868   mirror::Class::SetStatus(dalvik_system_ClassExt, ClassStatus::kNotReady, self);
869   CheckSystemClass(self, dalvik_system_ClassExt, "Ldalvik/system/ClassExt;");
870   CHECK_EQ(dalvik_system_ClassExt->GetObjectSize(), mirror::ClassExt::InstanceSize());
871 
872   // Run Class through FindSystemClass. This initializes the dex_cache_ fields and registers it
873   // in class_table_.
874   CheckSystemClass(self, java_lang_Class, "Ljava/lang/Class;");
875 
876   // Setup core array classes, i.e. Object[], String[] and Class[] and primitive
877   // arrays - can't be done until Object has a vtable and component classes are loaded.
878   FinishCoreArrayClassSetup(ClassRoot::kObjectArrayClass);
879   FinishCoreArrayClassSetup(ClassRoot::kClassArrayClass);
880   FinishCoreArrayClassSetup(ClassRoot::kJavaLangStringArrayClass);
881   FinishCoreArrayClassSetup(ClassRoot::kBooleanArrayClass);
882   FinishCoreArrayClassSetup(ClassRoot::kByteArrayClass);
883   FinishCoreArrayClassSetup(ClassRoot::kCharArrayClass);
884   FinishCoreArrayClassSetup(ClassRoot::kShortArrayClass);
885   FinishCoreArrayClassSetup(ClassRoot::kIntArrayClass);
886   FinishCoreArrayClassSetup(ClassRoot::kLongArrayClass);
887   FinishCoreArrayClassSetup(ClassRoot::kFloatArrayClass);
888   FinishCoreArrayClassSetup(ClassRoot::kDoubleArrayClass);
889 
890   // Setup the single, global copy of "iftable".
891   auto java_lang_Cloneable = hs.NewHandle(FindSystemClass(self, "Ljava/lang/Cloneable;"));
892   CHECK(java_lang_Cloneable != nullptr);
893   auto java_io_Serializable = hs.NewHandle(FindSystemClass(self, "Ljava/io/Serializable;"));
894   CHECK(java_io_Serializable != nullptr);
895   // We assume that Cloneable/Serializable don't have superinterfaces -- normally we'd have to
896   // crawl up and explicitly list all of the supers as well.
897   object_array_class->GetIfTable()->SetInterface(0, java_lang_Cloneable.Get());
898   object_array_class->GetIfTable()->SetInterface(1, java_io_Serializable.Get());
899 
900   // Check Class[] and Object[]'s interfaces. GetDirectInterface may cause thread suspension.
901   CHECK_EQ(java_lang_Cloneable.Get(),
902            mirror::Class::GetDirectInterface(self, class_array_class.Get(), 0));
903   CHECK_EQ(java_io_Serializable.Get(),
904            mirror::Class::GetDirectInterface(self, class_array_class.Get(), 1));
905   CHECK_EQ(java_lang_Cloneable.Get(),
906            mirror::Class::GetDirectInterface(self, object_array_class.Get(), 0));
907   CHECK_EQ(java_io_Serializable.Get(),
908            mirror::Class::GetDirectInterface(self, object_array_class.Get(), 1));
909 
910   CHECK_EQ(object_array_string.Get(),
911            FindSystemClass(self, GetClassRootDescriptor(ClassRoot::kJavaLangStringArrayClass)));
912 
913   // End of special init trickery, all subsequent classes may be loaded via FindSystemClass.
914 
915   // Create java.lang.reflect.Proxy root.
916   SetClassRoot(ClassRoot::kJavaLangReflectProxy,
917                FindSystemClass(self, "Ljava/lang/reflect/Proxy;"));
918 
919   // Create java.lang.reflect.Field.class root.
920   ObjPtr<mirror::Class> class_root = FindSystemClass(self, "Ljava/lang/reflect/Field;");
921   CHECK(class_root != nullptr);
922   SetClassRoot(ClassRoot::kJavaLangReflectField, class_root);
923 
924   // Create java.lang.reflect.Field array root.
925   class_root = FindSystemClass(self, "[Ljava/lang/reflect/Field;");
926   CHECK(class_root != nullptr);
927   SetClassRoot(ClassRoot::kJavaLangReflectFieldArrayClass, class_root);
928 
929   // Create java.lang.reflect.Constructor.class root and array root.
930   class_root = FindSystemClass(self, "Ljava/lang/reflect/Constructor;");
931   CHECK(class_root != nullptr);
932   SetClassRoot(ClassRoot::kJavaLangReflectConstructor, class_root);
933   class_root = FindSystemClass(self, "[Ljava/lang/reflect/Constructor;");
934   CHECK(class_root != nullptr);
935   SetClassRoot(ClassRoot::kJavaLangReflectConstructorArrayClass, class_root);
936 
937   // Create java.lang.reflect.Method.class root and array root.
938   class_root = FindSystemClass(self, "Ljava/lang/reflect/Method;");
939   CHECK(class_root != nullptr);
940   SetClassRoot(ClassRoot::kJavaLangReflectMethod, class_root);
941   class_root = FindSystemClass(self, "[Ljava/lang/reflect/Method;");
942   CHECK(class_root != nullptr);
943   SetClassRoot(ClassRoot::kJavaLangReflectMethodArrayClass, class_root);
944 
945   // Create java.lang.invoke.CallSite.class root
946   class_root = FindSystemClass(self, "Ljava/lang/invoke/CallSite;");
947   CHECK(class_root != nullptr);
948   SetClassRoot(ClassRoot::kJavaLangInvokeCallSite, class_root);
949 
950   // Create java.lang.invoke.MethodType.class root
951   class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodType;");
952   CHECK(class_root != nullptr);
953   SetClassRoot(ClassRoot::kJavaLangInvokeMethodType, class_root);
954 
955   // Create java.lang.invoke.MethodHandleImpl.class root
956   class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodHandleImpl;");
957   CHECK(class_root != nullptr);
958   SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandleImpl, class_root);
959   SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandle, class_root->GetSuperClass());
960 
961   // Create java.lang.invoke.MethodHandles.Lookup.class root
962   class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodHandles$Lookup;");
963   CHECK(class_root != nullptr);
964   SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandlesLookup, class_root);
965 
966   // Create java.lang.invoke.VarHandle.class root
967   class_root = FindSystemClass(self, "Ljava/lang/invoke/VarHandle;");
968   CHECK(class_root != nullptr);
969   SetClassRoot(ClassRoot::kJavaLangInvokeVarHandle, class_root);
970 
971   // Create java.lang.invoke.FieldVarHandle.class root
972   class_root = FindSystemClass(self, "Ljava/lang/invoke/FieldVarHandle;");
973   CHECK(class_root != nullptr);
974   SetClassRoot(ClassRoot::kJavaLangInvokeFieldVarHandle, class_root);
975 
976   // Create java.lang.invoke.ArrayElementVarHandle.class root
977   class_root = FindSystemClass(self, "Ljava/lang/invoke/ArrayElementVarHandle;");
978   CHECK(class_root != nullptr);
979   SetClassRoot(ClassRoot::kJavaLangInvokeArrayElementVarHandle, class_root);
980 
981   // Create java.lang.invoke.ByteArrayViewVarHandle.class root
982   class_root = FindSystemClass(self, "Ljava/lang/invoke/ByteArrayViewVarHandle;");
983   CHECK(class_root != nullptr);
984   SetClassRoot(ClassRoot::kJavaLangInvokeByteArrayViewVarHandle, class_root);
985 
986   // Create java.lang.invoke.ByteBufferViewVarHandle.class root
987   class_root = FindSystemClass(self, "Ljava/lang/invoke/ByteBufferViewVarHandle;");
988   CHECK(class_root != nullptr);
989   SetClassRoot(ClassRoot::kJavaLangInvokeByteBufferViewVarHandle, class_root);
990 
991   class_root = FindSystemClass(self, "Ldalvik/system/EmulatedStackFrame;");
992   CHECK(class_root != nullptr);
993   SetClassRoot(ClassRoot::kDalvikSystemEmulatedStackFrame, class_root);
994 
995   // java.lang.ref classes need to be specially flagged, but otherwise are normal classes
996   // finish initializing Reference class
997   mirror::Class::SetStatus(java_lang_ref_Reference, ClassStatus::kNotReady, self);
998   CheckSystemClass(self, java_lang_ref_Reference, "Ljava/lang/ref/Reference;");
999   CHECK_EQ(java_lang_ref_Reference->GetObjectSize(), mirror::Reference::InstanceSize());
1000   CHECK_EQ(java_lang_ref_Reference->GetClassSize(),
1001            mirror::Reference::ClassSize(image_pointer_size_));
1002   class_root = FindSystemClass(self, "Ljava/lang/ref/FinalizerReference;");
1003   CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
1004   class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagFinalizerReference);
1005   class_root = FindSystemClass(self, "Ljava/lang/ref/PhantomReference;");
1006   CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
1007   class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagPhantomReference);
1008   class_root = FindSystemClass(self, "Ljava/lang/ref/SoftReference;");
1009   CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
1010   class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagSoftReference);
1011   class_root = FindSystemClass(self, "Ljava/lang/ref/WeakReference;");
1012   CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
1013   class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagWeakReference);
1014 
1015   // Setup the ClassLoader, verifying the object_size_.
1016   class_root = FindSystemClass(self, "Ljava/lang/ClassLoader;");
1017   class_root->SetClassLoaderClass();
1018   CHECK_EQ(class_root->GetObjectSize(), mirror::ClassLoader::InstanceSize());
1019   SetClassRoot(ClassRoot::kJavaLangClassLoader, class_root);
1020 
1021   // Set up java.lang.Throwable, java.lang.ClassNotFoundException, and
1022   // java.lang.StackTraceElement as a convenience.
1023   SetClassRoot(ClassRoot::kJavaLangThrowable, FindSystemClass(self, "Ljava/lang/Throwable;"));
1024   SetClassRoot(ClassRoot::kJavaLangClassNotFoundException,
1025                FindSystemClass(self, "Ljava/lang/ClassNotFoundException;"));
1026   SetClassRoot(ClassRoot::kJavaLangStackTraceElement,
1027                FindSystemClass(self, "Ljava/lang/StackTraceElement;"));
1028   SetClassRoot(ClassRoot::kJavaLangStackTraceElementArrayClass,
1029                FindSystemClass(self, "[Ljava/lang/StackTraceElement;"));
1030   SetClassRoot(ClassRoot::kJavaLangClassLoaderArrayClass,
1031                FindSystemClass(self, "[Ljava/lang/ClassLoader;"));
1032 
1033   // Create conflict tables that depend on the class linker.
1034   runtime->FixupConflictTables();
1035 
1036   FinishInit(self);
1037 
1038   VLOG(startup) << "ClassLinker::InitFromCompiler exiting";
1039 
1040   return true;
1041 }
1042 
1043 static void CreateStringInitBindings(Thread* self, ClassLinker* class_linker)
1044     REQUIRES_SHARED(Locks::mutator_lock_) {
1045   // Find String.<init> -> StringFactory bindings.
1046   ObjPtr<mirror::Class> string_factory_class =
1047       class_linker->FindSystemClass(self, "Ljava/lang/StringFactory;");
1048   CHECK(string_factory_class != nullptr);
1049   ObjPtr<mirror::Class> string_class = GetClassRoot<mirror::String>(class_linker);
1050   WellKnownClasses::InitStringInit(string_class, string_factory_class);
1051   // Update the primordial thread.
1052   self->InitStringEntryPoints();
1053 }
1054 
1055 void ClassLinker::FinishInit(Thread* self) {
1056   VLOG(startup) << "ClassLinker::FinishInit entering";
1057 
1058   CreateStringInitBindings(self, this);
1059 
1060   // Let the heap know some key offsets into java.lang.ref instances
1061   // Note: we hard code the field indexes here rather than using FindInstanceField
1062   // as the field types can't be resolved prior to the runtime being
1063   // fully initialized.
1064   StackHandleScope<3> hs(self);
1065   Handle<mirror::Class> java_lang_ref_Reference =
1066       hs.NewHandle(GetClassRoot<mirror::Reference>(this));
1067   Handle<mirror::Class> java_lang_ref_FinalizerReference =
1068       hs.NewHandle(FindSystemClass(self, "Ljava/lang/ref/FinalizerReference;"));
1069 
1070   ArtField* pendingNext = java_lang_ref_Reference->GetInstanceField(0);
1071   CHECK_STREQ(pendingNext->GetName(), "pendingNext");
1072   CHECK_STREQ(pendingNext->GetTypeDescriptor(), "Ljava/lang/ref/Reference;");
1073 
1074   ArtField* queue = java_lang_ref_Reference->GetInstanceField(1);
1075   CHECK_STREQ(queue->GetName(), "queue");
1076   CHECK_STREQ(queue->GetTypeDescriptor(), "Ljava/lang/ref/ReferenceQueue;");
1077 
1078   ArtField* queueNext = java_lang_ref_Reference->GetInstanceField(2);
1079   CHECK_STREQ(queueNext->GetName(), "queueNext");
1080   CHECK_STREQ(queueNext->GetTypeDescriptor(), "Ljava/lang/ref/Reference;");
1081 
1082   ArtField* referent = java_lang_ref_Reference->GetInstanceField(3);
1083   CHECK_STREQ(referent->GetName(), "referent");
1084   CHECK_STREQ(referent->GetTypeDescriptor(), "Ljava/lang/Object;");
1085 
1086   ArtField* zombie = java_lang_ref_FinalizerReference->GetInstanceField(2);
1087   CHECK_STREQ(zombie->GetName(), "zombie");
1088   CHECK_STREQ(zombie->GetTypeDescriptor(), "Ljava/lang/Object;");
1089 
1090   // ensure all class_roots_ are initialized
1091   for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); i++) {
1092     ClassRoot class_root = static_cast<ClassRoot>(i);
1093     ObjPtr<mirror::Class> klass = GetClassRoot(class_root);
1094     CHECK(klass != nullptr);
1095     DCHECK(klass->IsArrayClass() || klass->IsPrimitive() || klass->GetDexCache() != nullptr);
1096     // Note: SetClassRoot does additional validation;
1097     // if possible add new checks there to catch errors early.
1098   }
1099 
1100   CHECK(GetArrayIfTable() != nullptr);
1101 
1102   // disable the slow paths in FindClass and CreatePrimitiveClass now
1103   // that Object, Class, and Object[] are set up
1104   init_done_ = true;
1105 
1106   // Under sanitization, the small carve-out to handle stack overflow might not be enough to
1107   // initialize the StackOverflowError class (as it might require running the verifier). Instead,
1108   // ensure that the class will be initialized.
1109   if (kMemoryToolIsAvailable && !Runtime::Current()->IsAotCompiler()) {
1110     verifier::ClassVerifier::Init(this);  // Need to prepare the verifier.
1111 
1112     ObjPtr<mirror::Class> soe_klass = FindSystemClass(self, "Ljava/lang/StackOverflowError;");
1113     if (soe_klass == nullptr || !EnsureInitialized(self, hs.NewHandle(soe_klass), true, true)) {
1114       // Strange, but don't crash.
1115       LOG(WARNING) << "Could not prepare StackOverflowError.";
1116       self->ClearException();
1117     }
1118   }
1119 
1120   VLOG(startup) << "ClassLinker::FinishInit exiting";
1121 }
1122 
1123 void ClassLinker::RunRootClinits(Thread* self) {
1124   for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); ++i) {
1125     ObjPtr<mirror::Class> c = GetClassRoot(ClassRoot(i), this);
1126     if (!c->IsArrayClass() && !c->IsPrimitive()) {
1127       StackHandleScope<1> hs(self);
1128       Handle<mirror::Class> h_class(hs.NewHandle(c));
1129       if (!EnsureInitialized(self, h_class, true, true)) {
1130         LOG(FATAL) << "Exception when initializing " << h_class->PrettyClass()
1131             << ": " << self->GetException()->Dump();
1132       }
1133     } else {
1134       DCHECK(c->IsInitialized());
1135     }
1136   }
1137 }
1138 
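// Computes the modified-UTF-8 hashes of java.lang.Object's virtual method names into
// `virtual_method_hashes`; the cached hashes are presumably used later so name comparisons
// against Object's virtual methods can be done by hash first.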
1139 static void InitializeObjectVirtualMethodHashes(ObjPtr<mirror::Class> java_lang_Object,
1140                                                 PointerSize pointer_size,
1141                                                 /*out*/ ArrayRef<uint32_t> virtual_method_hashes)
1142     REQUIRES_SHARED(Locks::mutator_lock_) {
1143   ArraySlice<ArtMethod> virtual_methods = java_lang_Object->GetVirtualMethods(pointer_size);
1144   DCHECK_EQ(virtual_method_hashes.size(), virtual_methods.size());
1145   for (size_t i = 0; i != virtual_method_hashes.size(); ++i) {
1146     const char* name = virtual_methods[i].GetName();
1147     virtual_method_hashes[i] = ComputeModifiedUtf8Hash(name);
1148   }
1149 }
1150 
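// Scratch data for the debug-only trampoline consistency check in InitFromBootImage below:
// it records one image's trampoline entrypoints so a heap-walk visitor can flag any ArtMethod
// whose entrypoint still refers to them.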
1151 struct TrampolineCheckData {
1152   const void* quick_resolution_trampoline;
1153   const void* quick_imt_conflict_trampoline;
1154   const void* quick_generic_jni_trampoline;
1155   const void* quick_to_interpreter_bridge_trampoline;
1156   const void* nterp_trampoline;
1157   PointerSize pointer_size;
1158   ArtMethod* m;
1159   bool error;
1160 };
1161 
1162 bool ClassLinker::InitFromBootImage(std::string* error_msg) {
1163   VLOG(startup) << __FUNCTION__ << " entering";
1164   CHECK(!init_done_);
1165 
1166   Runtime* const runtime = Runtime::Current();
1167   Thread* const self = Thread::Current();
1168   gc::Heap* const heap = runtime->GetHeap();
1169   std::vector<gc::space::ImageSpace*> spaces = heap->GetBootImageSpaces();
1170   CHECK(!spaces.empty());
1171   const ImageHeader& image_header = spaces[0]->GetImageHeader();
1172   uint32_t pointer_size_unchecked = image_header.GetPointerSizeUnchecked();
1173   if (!ValidPointerSize(pointer_size_unchecked)) {
1174     *error_msg = StringPrintf("Invalid image pointer size: %u", pointer_size_unchecked);
1175     return false;
1176   }
1177   image_pointer_size_ = image_header.GetPointerSize();
1178   if (!runtime->IsAotCompiler()) {
1179     // Only the AOT compiler supports having an image with a different pointer size than the
1180     // runtime. This happens on the host when compiling 32-bit tests, since we use a 64-bit
1181     // libart compiler. We may also use 32-bit dex2oat on a system with 64-bit apps.
1182     if (image_pointer_size_ != kRuntimePointerSize) {
1183       *error_msg = StringPrintf("Runtime must use current image pointer size: %zu vs %zu",
1184                                 static_cast<size_t>(image_pointer_size_),
1185                                 sizeof(void*));
1186       return false;
1187     }
1188   }
1189   DCHECK(!runtime->HasResolutionMethod());
1190   runtime->SetResolutionMethod(image_header.GetImageMethod(ImageHeader::kResolutionMethod));
1191   runtime->SetImtConflictMethod(image_header.GetImageMethod(ImageHeader::kImtConflictMethod));
1192   runtime->SetImtUnimplementedMethod(
1193       image_header.GetImageMethod(ImageHeader::kImtUnimplementedMethod));
1194   runtime->SetCalleeSaveMethod(
1195       image_header.GetImageMethod(ImageHeader::kSaveAllCalleeSavesMethod),
1196       CalleeSaveType::kSaveAllCalleeSaves);
1197   runtime->SetCalleeSaveMethod(
1198       image_header.GetImageMethod(ImageHeader::kSaveRefsOnlyMethod),
1199       CalleeSaveType::kSaveRefsOnly);
1200   runtime->SetCalleeSaveMethod(
1201       image_header.GetImageMethod(ImageHeader::kSaveRefsAndArgsMethod),
1202       CalleeSaveType::kSaveRefsAndArgs);
1203   runtime->SetCalleeSaveMethod(
1204       image_header.GetImageMethod(ImageHeader::kSaveEverythingMethod),
1205       CalleeSaveType::kSaveEverything);
1206   runtime->SetCalleeSaveMethod(
1207       image_header.GetImageMethod(ImageHeader::kSaveEverythingMethodForClinit),
1208       CalleeSaveType::kSaveEverythingForClinit);
1209   runtime->SetCalleeSaveMethod(
1210       image_header.GetImageMethod(ImageHeader::kSaveEverythingMethodForSuspendCheck),
1211       CalleeSaveType::kSaveEverythingForSuspendCheck);
1212 
1213   std::vector<const OatFile*> oat_files =
1214       runtime->GetOatFileManager().RegisterImageOatFiles(spaces);
1215   DCHECK(!oat_files.empty());
1216   const OatHeader& default_oat_header = oat_files[0]->GetOatHeader();
1217   jni_dlsym_lookup_trampoline_ = default_oat_header.GetJniDlsymLookupTrampoline();
1218   jni_dlsym_lookup_critical_trampoline_ = default_oat_header.GetJniDlsymLookupCriticalTrampoline();
1219   quick_resolution_trampoline_ = default_oat_header.GetQuickResolutionTrampoline();
1220   quick_imt_conflict_trampoline_ = default_oat_header.GetQuickImtConflictTrampoline();
1221   quick_generic_jni_trampoline_ = default_oat_header.GetQuickGenericJniTrampoline();
1222   quick_to_interpreter_bridge_trampoline_ = default_oat_header.GetQuickToInterpreterBridge();
1223   nterp_trampoline_ = default_oat_header.GetNterpTrampoline();
1224   if (kIsDebugBuild) {
1225     // Check that the other images use the same trampolines.
1226     for (size_t i = 1; i < oat_files.size(); ++i) {
1227       const OatHeader& ith_oat_header = oat_files[i]->GetOatHeader();
1228       const void* ith_jni_dlsym_lookup_trampoline_ =
1229           ith_oat_header.GetJniDlsymLookupTrampoline();
1230       const void* ith_jni_dlsym_lookup_critical_trampoline_ =
1231           ith_oat_header.GetJniDlsymLookupCriticalTrampoline();
1232       const void* ith_quick_resolution_trampoline =
1233           ith_oat_header.GetQuickResolutionTrampoline();
1234       const void* ith_quick_imt_conflict_trampoline =
1235           ith_oat_header.GetQuickImtConflictTrampoline();
1236       const void* ith_quick_generic_jni_trampoline =
1237           ith_oat_header.GetQuickGenericJniTrampoline();
1238       const void* ith_quick_to_interpreter_bridge_trampoline =
1239           ith_oat_header.GetQuickToInterpreterBridge();
1240       const void* ith_nterp_trampoline =
1241           ith_oat_header.GetNterpTrampoline();
1242       if (ith_jni_dlsym_lookup_trampoline_ != jni_dlsym_lookup_trampoline_ ||
1243           ith_jni_dlsym_lookup_critical_trampoline_ != jni_dlsym_lookup_critical_trampoline_ ||
1244           ith_quick_resolution_trampoline != quick_resolution_trampoline_ ||
1245           ith_quick_imt_conflict_trampoline != quick_imt_conflict_trampoline_ ||
1246           ith_quick_generic_jni_trampoline != quick_generic_jni_trampoline_ ||
1247           ith_quick_to_interpreter_bridge_trampoline != quick_to_interpreter_bridge_trampoline_ ||
1248           ith_nterp_trampoline != nterp_trampoline_) {
1249         // Make sure that all methods in this image do not contain those trampolines as
1250         // entrypoints. Otherwise the class-linker won't be able to work with a single set.
1251         TrampolineCheckData data;
1252         data.error = false;
1253         data.pointer_size = GetImagePointerSize();
1254         data.quick_resolution_trampoline = ith_quick_resolution_trampoline;
1255         data.quick_imt_conflict_trampoline = ith_quick_imt_conflict_trampoline;
1256         data.quick_generic_jni_trampoline = ith_quick_generic_jni_trampoline;
1257         data.quick_to_interpreter_bridge_trampoline = ith_quick_to_interpreter_bridge_trampoline;
1258         data.nterp_trampoline = ith_nterp_trampoline;
1259         ReaderMutexLock mu(self, *Locks::heap_bitmap_lock_);
1260         auto visitor = [&](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
1261           if (obj->IsClass()) {
1262             ObjPtr<mirror::Class> klass = obj->AsClass();
1263             for (ArtMethod& m : klass->GetMethods(data.pointer_size)) {
1264               const void* entrypoint =
1265                   m.GetEntryPointFromQuickCompiledCodePtrSize(data.pointer_size);
1266               if (entrypoint == data.quick_resolution_trampoline ||
1267                   entrypoint == data.quick_imt_conflict_trampoline ||
1268                   entrypoint == data.quick_generic_jni_trampoline ||
1269                   entrypoint == data.quick_to_interpreter_bridge_trampoline) {
1270                 data.m = &m;
1271                 data.error = true;
1272                 return;
1273               }
1274             }
1275           }
1276         };
1277         spaces[i]->GetLiveBitmap()->Walk(visitor);
1278         if (data.error) {
1279           ArtMethod* m = data.m;
1280           LOG(ERROR) << "Found a broken ArtMethod: " << ArtMethod::PrettyMethod(m);
1281           *error_msg = "Found an ArtMethod with a bad entrypoint";
1282           return false;
1283         }
1284       }
1285     }
1286   }
1287 
1288   class_roots_ = GcRoot<mirror::ObjectArray<mirror::Class>>(
1289       ObjPtr<mirror::ObjectArray<mirror::Class>>::DownCast(
1290           image_header.GetImageRoot(ImageHeader::kClassRoots)));
1291   DCHECK_EQ(GetClassRoot<mirror::Class>(this)->GetClassFlags(), mirror::kClassFlagClass);
1292 
1293   DCHECK_EQ(GetClassRoot<mirror::Object>(this)->GetObjectSize(), sizeof(mirror::Object));
1294   ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects =
1295       ObjPtr<mirror::ObjectArray<mirror::Object>>::DownCast(
1296           image_header.GetImageRoot(ImageHeader::kBootImageLiveObjects));
1297   runtime->SetSentinel(boot_image_live_objects->Get(ImageHeader::kClearedJniWeakSentinel));
1298   DCHECK(runtime->GetSentinel().Read()->GetClass() == GetClassRoot<mirror::Object>(this));
1299 
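  // Load each boot image space with the boot class loader (null handle) and collect the opened
  // dex files into boot_dex_files_; the files are then madvised for the at-load state below.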
1300   for (size_t i = 0u, size = spaces.size(); i != size; ++i) {
1301     // Boot class loader, use a null handle.
1302     std::vector<std::unique_ptr<const DexFile>> dex_files;
1303     if (!AddImageSpace(spaces[i],
1304                        ScopedNullHandle<mirror::ClassLoader>(),
1305                        /*out*/&dex_files,
1306                        error_msg)) {
1307       return false;
1308     }
1309     // Append opened dex files at the end.
1310     boot_dex_files_.insert(boot_dex_files_.end(),
1311                            std::make_move_iterator(dex_files.begin()),
1312                            std::make_move_iterator(dex_files.end()));
1313   }
1314   for (const std::unique_ptr<const DexFile>& dex_file : boot_dex_files_) {
1315     OatDexFile::MadviseDexFile(*dex_file, MadviseState::kMadviseStateAtLoad);
1316   }
1317   InitializeObjectVirtualMethodHashes(GetClassRoot<mirror::Object>(this),
1318                                       image_pointer_size_,
1319                                       ArrayRef<uint32_t>(object_virtual_method_hashes_));
1320   FinishInit(self);
1321 
1322   VLOG(startup) << __FUNCTION__ << " exiting";
1323   return true;
1324 }
1325 
1326 void ClassLinker::AddExtraBootDexFiles(
1327     Thread* self,
1328     std::vector<std::unique_ptr<const DexFile>>&& additional_dex_files) {
1329   for (std::unique_ptr<const DexFile>& dex_file : additional_dex_files) {
1330     AppendToBootClassPath(self, dex_file.get());
1331     if (kIsDebugBuild) {
1332       for (const auto& boot_dex_file : boot_dex_files_) {
1333         DCHECK_NE(boot_dex_file->GetLocation(), dex_file->GetLocation());
1334       }
1335     }
1336     boot_dex_files_.push_back(std::move(dex_file));
1337   }
1338 }
1339 
1340 bool ClassLinker::IsBootClassLoader(ScopedObjectAccessAlreadyRunnable& soa,
1341                                     ObjPtr<mirror::ClassLoader> class_loader) {
1342   return class_loader == nullptr ||
1343        soa.Decode<mirror::Class>(WellKnownClasses::java_lang_BootClassLoader) ==
1344            class_loader->GetClass();
1345 }
1346 
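// Visitor applied to the classes of a class loader that is about to be unloaded; it tells CHA
// to reset any single-implementation information derived from those classes.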
1347 class CHAOnDeleteUpdateClassVisitor {
1348  public:
1349   explicit CHAOnDeleteUpdateClassVisitor(LinearAlloc* alloc)
1350       : allocator_(alloc), cha_(Runtime::Current()->GetClassLinker()->GetClassHierarchyAnalysis()),
1351         pointer_size_(Runtime::Current()->GetClassLinker()->GetImagePointerSize()),
1352         self_(Thread::Current()) {}
1353 
1354   bool operator()(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
1355     // This class is going to be unloaded. Tell CHA about it.
1356     cha_->ResetSingleImplementationInHierarchy(klass, allocator_, pointer_size_);
1357     return true;
1358   }
1359  private:
1360   const LinearAlloc* allocator_;
1361   const ClassHierarchyAnalysis* cha_;
1362   const PointerSize pointer_size_;
1363   const Thread* self_;
1364 };
1365 
1366 /*
1367  * A class used to ensure that all references to strings interned in an AppImage have been
1368  * properly recorded in the interned references list. It is only ever used in debug builds.
1369  */
1370 class CountInternedStringReferencesVisitor {
1371  public:
1372   CountInternedStringReferencesVisitor(const gc::space::ImageSpace& space,
1373                                        const InternTable::UnorderedSet& image_interns)
1374       : space_(space),
1375         image_interns_(image_interns),
1376         count_(0u) {}
1377 
1378   void TestObject(ObjPtr<mirror::Object> referred_obj) const
1379       REQUIRES_SHARED(Locks::mutator_lock_) {
1380     if (referred_obj != nullptr &&
1381         space_.HasAddress(referred_obj.Ptr()) &&
1382         referred_obj->IsString()) {
1383       ObjPtr<mirror::String> referred_str = referred_obj->AsString();
1384       auto it = image_interns_.find(GcRoot<mirror::String>(referred_str));
1385       if (it != image_interns_.end() && it->Read() == referred_str) {
1386         ++count_;
1387       }
1388     }
1389   }
1390 
1391   void VisitRootIfNonNull(
1392       mirror::CompressedReference<mirror::Object>* root) const
1393       REQUIRES_SHARED(Locks::mutator_lock_) {
1394     if (!root->IsNull()) {
1395       VisitRoot(root);
1396     }
1397   }
1398 
1399   void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
1400       REQUIRES_SHARED(Locks::mutator_lock_) {
1401     TestObject(root->AsMirrorPtr());
1402   }
1403 
1404   // Visit Class Fields
1405   void operator()(ObjPtr<mirror::Object> obj,
1406                   MemberOffset offset,
1407                   bool is_static ATTRIBUTE_UNUSED) const
1408       REQUIRES_SHARED(Locks::mutator_lock_) {
1409     // References within image or across images don't need a read barrier.
1410     ObjPtr<mirror::Object> referred_obj =
1411         obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset);
1412     TestObject(referred_obj);
1413   }
1414 
1415   void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
1416                   ObjPtr<mirror::Reference> ref) const
1417       REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
1418     operator()(ref, mirror::Reference::ReferentOffset(), /*is_static=*/ false);
1419   }
1420 
1421   size_t GetCount() const {
1422     return count_;
1423   }
1424 
1425  private:
1426   const gc::space::ImageSpace& space_;
1427   const InternTable::UnorderedSet& image_interns_;
1428   mutable size_t count_;  // Modified from the `const` callbacks.
1429 };
1430 
1431 /*
1432  * This function counts references to strings interned in the AppImage.
1433  * It is used in debug builds to check against the number of recorded references.
1434  */
1435 size_t CountInternedStringReferences(gc::space::ImageSpace& space,
1436                                      const InternTable::UnorderedSet& image_interns)
1437     REQUIRES_SHARED(Locks::mutator_lock_) {
1438   const gc::accounting::ContinuousSpaceBitmap* bitmap = space.GetMarkBitmap();
1439   const ImageHeader& image_header = space.GetImageHeader();
1440   const uint8_t* target_base = space.GetMemMap()->Begin();
1441   const ImageSection& objects_section = image_header.GetObjectsSection();
1442 
1443   auto objects_begin = reinterpret_cast<uintptr_t>(target_base + objects_section.Offset());
1444   auto objects_end = reinterpret_cast<uintptr_t>(target_base + objects_section.End());
1445 
1446   CountInternedStringReferencesVisitor visitor(space, image_interns);
1447   bitmap->VisitMarkedRange(objects_begin,
1448                            objects_end,
1449                            [&space, &visitor](mirror::Object* obj)
1450     REQUIRES_SHARED(Locks::mutator_lock_) {
1451     if (space.HasAddress(obj)) {
1452       if (obj->IsDexCache()) {
1453         obj->VisitReferences</* kVisitNativeRoots= */ true,
1454                              kVerifyNone,
1455                              kWithoutReadBarrier>(visitor, visitor);
1456       } else {
1457         // Don't visit native roots for non-dex-cache objects, as they can't contain
1458         // native references to strings. This is verified during compilation
1459         // by ImageWriter::VerifyNativeGCRootInvariants.
1460         obj->VisitReferences</* kVisitNativeRoots= */ false,
1461                              kVerifyNone,
1462                              kWithoutReadBarrier>(visitor, visitor);
1463       }
1464     }
1465   });
1466   return visitor.GetCount();
1467 }
1468 
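// Walks the string-reference offsets recorded in the image's string reference offsets section,
// applies `visitor` to each referenced string, and writes back the returned string when the
// visitor substitutes a different one (used below for verification and for intern remapping).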
1469 template <typename Visitor>
1470 static void VisitInternedStringReferences(
1471     gc::space::ImageSpace* space,
1472     const Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
1473   const uint8_t* target_base = space->Begin();
1474   const ImageSection& sro_section =
1475       space->GetImageHeader().GetImageStringReferenceOffsetsSection();
1476   const size_t num_string_offsets = sro_section.Size() / sizeof(AppImageReferenceOffsetInfo);
1477 
1478   VLOG(image)
1479       << "ClassLinker:AppImage:InternStrings:imageStringReferenceOffsetCount = "
1480       << num_string_offsets;
1481 
1482   const auto* sro_base =
1483       reinterpret_cast<const AppImageReferenceOffsetInfo*>(target_base + sro_section.Offset());
1484 
1485   for (size_t offset_index = 0; offset_index < num_string_offsets; ++offset_index) {
1486     uint32_t base_offset = sro_base[offset_index].first;
1487 
1488     uint32_t raw_member_offset = sro_base[offset_index].second;
1489     DCHECK_ALIGNED(base_offset, 2);
1490     DCHECK_ALIGNED(raw_member_offset, 2);
1491 
1492     ObjPtr<mirror::Object> obj_ptr =
1493         reinterpret_cast<mirror::Object*>(space->Begin() + base_offset);
1494     MemberOffset member_offset(raw_member_offset);
1495     ObjPtr<mirror::String> referred_string =
1496         obj_ptr->GetFieldObject<mirror::String,
1497                                 kVerifyNone,
1498                                 kWithoutReadBarrier,
1499                                 /* kIsVolatile= */ false>(member_offset);
1500     DCHECK(referred_string != nullptr);
1501 
1502     ObjPtr<mirror::String> visited = visitor(referred_string);
1503     if (visited != referred_string) {
1504       obj_ptr->SetFieldObject</* kTransactionActive= */ false,
1505                               /* kCheckTransaction= */ false,
1506                               kVerifyNone,
1507                               /* kIsVolatile= */ false>(member_offset, visited);
1508     }
1509   }
1510 }
1511 
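// Debug-only check: every string reachable through the recorded string-reference offsets must
// be present in the image's interned strings section, and the number of recorded references
// must match an independent count obtained by walking the image objects.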
1512 static void VerifyInternedStringReferences(gc::space::ImageSpace* space)
1513     REQUIRES_SHARED(Locks::mutator_lock_) {
1514   InternTable::UnorderedSet image_interns;
1515   const ImageSection& section = space->GetImageHeader().GetInternedStringsSection();
1516   if (section.Size() > 0) {
1517     size_t read_count;
1518     const uint8_t* data = space->Begin() + section.Offset();
1519     InternTable::UnorderedSet image_set(data, /*make_copy_of_data=*/ false, &read_count);
1520     image_set.swap(image_interns);
1521   }
1522   size_t num_recorded_refs = 0u;
1523   VisitInternedStringReferences(
1524       space,
1525       [&image_interns, &num_recorded_refs](ObjPtr<mirror::String> str)
1526           REQUIRES_SHARED(Locks::mutator_lock_) {
1527         auto it = image_interns.find(GcRoot<mirror::String>(str));
1528         CHECK(it != image_interns.end());
1529         CHECK(it->Read() == str);
1530         ++num_recorded_refs;
1531         return str;
1532       });
1533   size_t num_found_refs = CountInternedStringReferences(*space, image_interns);
1534   CHECK_EQ(num_recorded_refs, num_found_refs);
1535 }
1536 
1537 // new_class_set is the set of classes that were read from the class table section in the image.
1538 // If there was no class table section, it is null.
1539 // Note: using a class here to avoid having to make ClassLinker internals public.
1540 class AppImageLoadingHelper {
1541  public:
1542   static void Update(
1543       ClassLinker* class_linker,
1544       gc::space::ImageSpace* space,
1545       Handle<mirror::ClassLoader> class_loader,
1546       Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches)
1547       REQUIRES(!Locks::dex_lock_)
1548       REQUIRES_SHARED(Locks::mutator_lock_);
1549 
1550   static void HandleAppImageStrings(gc::space::ImageSpace* space)
1551       REQUIRES_SHARED(Locks::mutator_lock_);
1552 };
1553 
1554 void AppImageLoadingHelper::Update(
1555     ClassLinker* class_linker,
1556     gc::space::ImageSpace* space,
1557     Handle<mirror::ClassLoader> class_loader,
1558     Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches)
1559     REQUIRES(!Locks::dex_lock_)
1560     REQUIRES_SHARED(Locks::mutator_lock_) {
1561   ScopedTrace app_image_timing("AppImage:Updating");
1562 
1563   if (kIsDebugBuild && ClassLinker::kAppImageMayContainStrings) {
1564     // In debug builds, verify the string references before applying
1565     // the Runtime::LoadAppImageStartupCache() option.
1566     VerifyInternedStringReferences(space);
1567   }
1568 
1569   Thread* const self = Thread::Current();
1570   Runtime* const runtime = Runtime::Current();
1571   gc::Heap* const heap = runtime->GetHeap();
1572   const ImageHeader& header = space->GetImageHeader();
1573   {
1574     // Register dex caches with the class loader.
1575     WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
1576     for (auto dex_cache : dex_caches.Iterate<mirror::DexCache>()) {
1577       const DexFile* const dex_file = dex_cache->GetDexFile();
1578       {
1579         WriterMutexLock mu2(self, *Locks::dex_lock_);
1580         CHECK(class_linker->FindDexCacheDataLocked(*dex_file) == nullptr);
1581         class_linker->RegisterDexFileLocked(*dex_file, dex_cache, class_loader.Get());
1582       }
1583     }
1584   }
1585 
1586   if (ClassLinker::kAppImageMayContainStrings) {
1587     HandleAppImageStrings(space);
1588   }
1589 
1590   if (kVerifyArtMethodDeclaringClasses) {
1591     ScopedTrace timing("AppImage:VerifyDeclaringClasses");
1592     ReaderMutexLock rmu(self, *Locks::heap_bitmap_lock_);
1593     gc::accounting::HeapBitmap* live_bitmap = heap->GetLiveBitmap();
1594     header.VisitPackedArtMethods([&](ArtMethod& method)
1595         REQUIRES_SHARED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
1596       ObjPtr<mirror::Class> klass = method.GetDeclaringClassUnchecked();
1597       if (klass != nullptr) {
1598         CHECK(live_bitmap->Test(klass.Ptr())) << "Image method has unmarked declaring class";
1599       }
1600     }, space->Begin(), kRuntimePointerSize);
1601   }
1602 }
1603 
1604 void AppImageLoadingHelper::HandleAppImageStrings(gc::space::ImageSpace* space) {
1605   // Iterate over the string reference offsets stored in the image and intern
1606   // the strings they point to.
1607   ScopedTrace timing("AppImage:InternString");
1608 
1609   Runtime* const runtime = Runtime::Current();
1610   InternTable* const intern_table = runtime->GetInternTable();
1611 
1612   // Add the image's interned strings to the intern table, removing any conflicts.
1613   // For conflicts, store the new address in a map for faster lookup.
1614   // TODO: Optimize with a bitmap or bloom filter
1615   SafeMap<mirror::String*, mirror::String*> intern_remap;
1616   auto func = [&](InternTable::UnorderedSet& interns)
1617       REQUIRES_SHARED(Locks::mutator_lock_)
1618       REQUIRES(Locks::intern_table_lock_) {
1619     const size_t non_boot_image_strings = intern_table->CountInterns(
1620         /*visit_boot_images=*/false,
1621         /*visit_non_boot_images=*/true);
1622     VLOG(image) << "AppImage:stringsInInternTableSize = " << interns.size();
1623     VLOG(image) << "AppImage:nonBootImageInternStrings = " << non_boot_image_strings;
1624     // Visit the smaller of the two sets to compute the intersection.
1625     if (interns.size() < non_boot_image_strings) {
1626       for (auto it = interns.begin(); it != interns.end(); ) {
1627         ObjPtr<mirror::String> string = it->Read();
1628         ObjPtr<mirror::String> existing = intern_table->LookupWeakLocked(string);
1629         if (existing == nullptr) {
1630           existing = intern_table->LookupStrongLocked(string);
1631         }
1632         if (existing != nullptr) {
1633           intern_remap.Put(string.Ptr(), existing.Ptr());
1634           it = interns.erase(it);
1635         } else {
1636           ++it;
1637         }
1638       }
1639     } else {
1640       intern_table->VisitInterns([&](const GcRoot<mirror::String>& root)
1641           REQUIRES_SHARED(Locks::mutator_lock_)
1642           REQUIRES(Locks::intern_table_lock_) {
1643         auto it = interns.find(root);
1644         if (it != interns.end()) {
1645           ObjPtr<mirror::String> existing = root.Read();
1646           intern_remap.Put(it->Read(), existing.Ptr());
1647           it = interns.erase(it);
1648         }
1649       }, /*visit_boot_images=*/false, /*visit_non_boot_images=*/true);
1650     }
1651     // Consistency check to ensure correctness.
1652     if (kIsDebugBuild) {
1653       for (GcRoot<mirror::String>& root : interns) {
1654         ObjPtr<mirror::String> string = root.Read();
1655         CHECK(intern_table->LookupWeakLocked(string) == nullptr) << string->ToModifiedUtf8();
1656         CHECK(intern_table->LookupStrongLocked(string) == nullptr) << string->ToModifiedUtf8();
1657       }
1658     }
1659   };
1660   intern_table->AddImageStringsToTable(space, func);
1661   if (!intern_remap.empty()) {
1662     VLOG(image) << "AppImage:conflictingInternStrings = " << intern_remap.size();
1663     VisitInternedStringReferences(
1664         space,
1665         [&intern_remap](ObjPtr<mirror::String> str) REQUIRES_SHARED(Locks::mutator_lock_) {
1666           auto it = intern_remap.find(str.Ptr());
1667           if (it != intern_remap.end()) {
1668             return ObjPtr<mirror::String>(it->second);
1669           }
1670           return str;
1671         });
1672   }
1673 }
1674 
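// Opens the dex file for `location` from within `oat_file` and verifies that its checksum
// matches the one recorded in the oat dex file; returns null (with `error_msg` set) on failure.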
1675 static std::unique_ptr<const DexFile> OpenOatDexFile(const OatFile* oat_file,
1676                                                      const char* location,
1677                                                      std::string* error_msg)
1678     REQUIRES_SHARED(Locks::mutator_lock_) {
1679   DCHECK(error_msg != nullptr);
1680   std::unique_ptr<const DexFile> dex_file;
1681   const OatDexFile* oat_dex_file = oat_file->GetOatDexFile(location, nullptr, error_msg);
1682   if (oat_dex_file == nullptr) {
1683     return std::unique_ptr<const DexFile>();
1684   }
1685   std::string inner_error_msg;
1686   dex_file = oat_dex_file->OpenDexFile(&inner_error_msg);
1687   if (dex_file == nullptr) {
1688     *error_msg = StringPrintf("Failed to open dex file %s from within oat file %s error '%s'",
1689                               location,
1690                               oat_file->GetLocation().c_str(),
1691                               inner_error_msg.c_str());
1692     return std::unique_ptr<const DexFile>();
1693   }
1694 
1695   if (dex_file->GetLocationChecksum() != oat_dex_file->GetDexFileLocationChecksum()) {
1696     *error_msg = StringPrintf("Checksums do not match for %s: %x vs %x",
1697                               location,
1698                               dex_file->GetLocationChecksum(),
1699                               oat_dex_file->GetDexFileLocationChecksum());
1700     return std::unique_ptr<const DexFile>();
1701   }
1702   return dex_file;
1703 }
1704 
1705 bool ClassLinker::OpenImageDexFiles(gc::space::ImageSpace* space,
1706                                     std::vector<std::unique_ptr<const DexFile>>* out_dex_files,
1707                                     std::string* error_msg) {
1708   ScopedAssertNoThreadSuspension nts(__FUNCTION__);
1709   const ImageHeader& header = space->GetImageHeader();
1710   ObjPtr<mirror::Object> dex_caches_object = header.GetImageRoot(ImageHeader::kDexCaches);
1711   DCHECK(dex_caches_object != nullptr);
1712   ObjPtr<mirror::ObjectArray<mirror::DexCache>> dex_caches =
1713       dex_caches_object->AsObjectArray<mirror::DexCache>();
1714   const OatFile* oat_file = space->GetOatFile();
1715   for (auto dex_cache : dex_caches->Iterate()) {
1716     std::string dex_file_location(dex_cache->GetLocation()->ToModifiedUtf8());
1717     std::unique_ptr<const DexFile> dex_file = OpenOatDexFile(oat_file,
1718                                                              dex_file_location.c_str(),
1719                                                              error_msg);
1720     if (dex_file == nullptr) {
1721       return false;
1722     }
1723     dex_cache->SetDexFile(dex_file.get());
1724     out_dex_files->push_back(std::move(dex_file));
1725   }
1726   return true;
1727 }
1728 
1729 // Helper class for ArtMethod checks when adding an image. Keeps all required functionality
1730 // together and caches some intermediate results.
1731 class ImageChecker final {
1732  public:
1733   static void CheckObjects(gc::Heap* heap, ClassLinker* class_linker)
1734       REQUIRES_SHARED(Locks::mutator_lock_) {
1735     ImageChecker ic(heap, class_linker);
1736     auto visitor = [&](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
1737       DCHECK(obj != nullptr);
1738       CHECK(obj->GetClass() != nullptr) << "Null class in object " << obj;
1739       CHECK(obj->GetClass()->GetClass() != nullptr) << "Null class class " << obj;
1740       if (obj->IsClass()) {
1741         auto klass = obj->AsClass();
1742         for (ArtField& field : klass->GetIFields()) {
1743           CHECK_EQ(field.GetDeclaringClass(), klass);
1744         }
1745         for (ArtField& field : klass->GetSFields()) {
1746           CHECK_EQ(field.GetDeclaringClass(), klass);
1747         }
1748         const PointerSize pointer_size = ic.pointer_size_;
1749         for (ArtMethod& m : klass->GetMethods(pointer_size)) {
1750           ic.CheckArtMethod(&m, klass);
1751         }
1752         ObjPtr<mirror::PointerArray> vtable = klass->GetVTable();
1753         if (vtable != nullptr) {
1754           ic.CheckArtMethodPointerArray(vtable, nullptr);
1755         }
1756         if (klass->ShouldHaveImt()) {
1757           ImTable* imt = klass->GetImt(pointer_size);
1758           for (size_t i = 0; i < ImTable::kSize; ++i) {
1759             ic.CheckArtMethod(imt->Get(i, pointer_size), nullptr);
1760           }
1761         }
1762         if (klass->ShouldHaveEmbeddedVTable()) {
1763           for (int32_t i = 0; i < klass->GetEmbeddedVTableLength(); ++i) {
1764             ic.CheckArtMethod(klass->GetEmbeddedVTableEntry(i, pointer_size), nullptr);
1765           }
1766         }
1767         ObjPtr<mirror::IfTable> iftable = klass->GetIfTable();
1768         for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
1769           if (iftable->GetMethodArrayCount(i) > 0) {
1770             ic.CheckArtMethodPointerArray(iftable->GetMethodArray(i), nullptr);
1771           }
1772         }
1773       }
1774     };
1775     heap->VisitObjects(visitor);
1776   }
1777 
1778  private:
1779   ImageChecker(gc::Heap* heap, ClassLinker* class_linker)
1780      :  spaces_(heap->GetBootImageSpaces()),
1781         pointer_size_(class_linker->GetImagePointerSize()) {
1782     space_begin_.reserve(spaces_.size());
1783     method_sections_.reserve(spaces_.size());
1784     runtime_method_sections_.reserve(spaces_.size());
1785     for (gc::space::ImageSpace* space : spaces_) {
1786       space_begin_.push_back(space->Begin());
1787       auto& header = space->GetImageHeader();
1788       method_sections_.push_back(&header.GetMethodsSection());
1789       runtime_method_sections_.push_back(&header.GetRuntimeMethodsSection());
1790     }
1791   }
1792 
1793   void CheckArtMethod(ArtMethod* m, ObjPtr<mirror::Class> expected_class)
1794       REQUIRES_SHARED(Locks::mutator_lock_) {
1795     if (m->IsRuntimeMethod()) {
1796       ObjPtr<mirror::Class> declaring_class = m->GetDeclaringClassUnchecked();
1797       CHECK(declaring_class == nullptr) << declaring_class << " " << m->PrettyMethod();
1798     } else if (m->IsCopied()) {
1799       CHECK(m->GetDeclaringClass() != nullptr) << m->PrettyMethod();
1800     } else if (expected_class != nullptr) {
1801       CHECK_EQ(m->GetDeclaringClassUnchecked(), expected_class) << m->PrettyMethod();
1802     }
1803     if (!spaces_.empty()) {
1804       bool contains = false;
1805       for (size_t i = 0; !contains && i != space_begin_.size(); ++i) {
1806         const size_t offset = reinterpret_cast<uint8_t*>(m) - space_begin_[i];
1807         contains = method_sections_[i]->Contains(offset) ||
1808             runtime_method_sections_[i]->Contains(offset);
1809       }
1810       CHECK(contains) << m << " not found";
1811     }
1812   }
1813 
1814   void CheckArtMethodPointerArray(ObjPtr<mirror::PointerArray> arr,
1815                                   ObjPtr<mirror::Class> expected_class)
1816       REQUIRES_SHARED(Locks::mutator_lock_) {
1817     CHECK(arr != nullptr);
1818     for (int32_t j = 0; j < arr->GetLength(); ++j) {
1819       auto* method = arr->GetElementPtrSize<ArtMethod*>(j, pointer_size_);
1820       // expected_class == null means we are a dex cache.
1821       if (expected_class != nullptr) {
1822         CHECK(method != nullptr);
1823       }
1824       if (method != nullptr) {
1825         CheckArtMethod(method, expected_class);
1826       }
1827     }
1828   }
1829 
1830   const std::vector<gc::space::ImageSpace*>& spaces_;
1831   const PointerSize pointer_size_;
1832 
1833   // Cached sections from the spaces.
1834   std::vector<const uint8_t*> space_begin_;
1835   std::vector<const ImageSection*> method_sections_;
1836   std::vector<const ImageSection*> runtime_method_sections_;
1837 };
1838 
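// Debug-only verification for app images: every ArtMethod's declaring class that is not in the
// boot image must be present in the class loader's class table, and every direct interface of
// the classes defined by this loader must be resolvable.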
1839 static void VerifyAppImage(const ImageHeader& header,
1840                            const Handle<mirror::ClassLoader>& class_loader,
1841                            ClassTable* class_table,
1842                            gc::space::ImageSpace* space)
1843     REQUIRES_SHARED(Locks::mutator_lock_) {
1844   header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
1845     ObjPtr<mirror::Class> klass = method.GetDeclaringClass();
1846     if (klass != nullptr && !Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass)) {
1847       CHECK_EQ(class_table->LookupByDescriptor(klass), klass)
1848           << mirror::Class::PrettyClass(klass);
1849     }
1850   }, space->Begin(), kRuntimePointerSize);
1851   {
1852     // Verify that all direct interfaces of classes in the class table are also resolved.
1853     std::vector<ObjPtr<mirror::Class>> classes;
1854     auto verify_direct_interfaces_in_table = [&](ObjPtr<mirror::Class> klass)
1855         REQUIRES_SHARED(Locks::mutator_lock_) {
1856       if (!klass->IsPrimitive() && klass->GetClassLoader() == class_loader.Get()) {
1857         classes.push_back(klass);
1858       }
1859       return true;
1860     };
1861     class_table->Visit(verify_direct_interfaces_in_table);
1862     Thread* self = Thread::Current();
1863     for (ObjPtr<mirror::Class> klass : classes) {
1864       for (uint32_t i = 0, num = klass->NumDirectInterfaces(); i != num; ++i) {
1865         CHECK(klass->GetDirectInterface(self, klass, i) != nullptr)
1866             << klass->PrettyDescriptor() << " iface #" << i;
1867       }
1868     }
1869   }
1870 }
1871 
1872 bool ClassLinker::AddImageSpace(
1873     gc::space::ImageSpace* space,
1874     Handle<mirror::ClassLoader> class_loader,
1875     std::vector<std::unique_ptr<const DexFile>>* out_dex_files,
1876     std::string* error_msg) {
1877   DCHECK(out_dex_files != nullptr);
1878   DCHECK(error_msg != nullptr);
1879   const uint64_t start_time = NanoTime();
1880   const bool app_image = class_loader != nullptr;
1881   const ImageHeader& header = space->GetImageHeader();
1882   ObjPtr<mirror::Object> dex_caches_object = header.GetImageRoot(ImageHeader::kDexCaches);
1883   DCHECK(dex_caches_object != nullptr);
1884   Runtime* const runtime = Runtime::Current();
1885   gc::Heap* const heap = runtime->GetHeap();
1886   Thread* const self = Thread::Current();
1887   // Check that the image is what we are expecting.
1888   if (image_pointer_size_ != space->GetImageHeader().GetPointerSize()) {
1889     *error_msg = StringPrintf("Application image pointer size does not match runtime: %zu vs %zu",
1890                               static_cast<size_t>(space->GetImageHeader().GetPointerSize()),
1891                               image_pointer_size_);
1892     return false;
1893   }
1894   size_t expected_image_roots = ImageHeader::NumberOfImageRoots(app_image);
1895   if (static_cast<size_t>(header.GetImageRoots()->GetLength()) != expected_image_roots) {
1896     *error_msg = StringPrintf("Expected %zu image roots but got %d",
1897                               expected_image_roots,
1898                               header.GetImageRoots()->GetLength());
1899     return false;
1900   }
1901   StackHandleScope<3> hs(self);
1902   Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches(
1903       hs.NewHandle(dex_caches_object->AsObjectArray<mirror::DexCache>()));
1904   Handle<mirror::ObjectArray<mirror::Class>> class_roots(hs.NewHandle(
1905       header.GetImageRoot(ImageHeader::kClassRoots)->AsObjectArray<mirror::Class>()));
1906   MutableHandle<mirror::ClassLoader> image_class_loader(hs.NewHandle(
1907       app_image ? header.GetImageRoot(ImageHeader::kAppImageClassLoader)->AsClassLoader()
1908                 : nullptr));
1909   DCHECK(class_roots != nullptr);
1910   if (class_roots->GetLength() != static_cast<int32_t>(ClassRoot::kMax)) {
1911     *error_msg = StringPrintf("Expected %d class roots but got %d",
1912                               class_roots->GetLength(),
1913                               static_cast<int32_t>(ClassRoot::kMax));
1914     return false;
1915   }
1916   // Check against existing class roots to make sure they match the ones in the boot image.
1917   ObjPtr<mirror::ObjectArray<mirror::Class>> existing_class_roots = GetClassRoots();
1918   for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); i++) {
1919     if (class_roots->Get(i) != GetClassRoot(static_cast<ClassRoot>(i), existing_class_roots)) {
1920       *error_msg = "App image class roots must have pointer equality with runtime ones.";
1921       return false;
1922     }
1923   }
1924   const OatFile* oat_file = space->GetOatFile();
1925   if (oat_file->GetOatHeader().GetDexFileCount() !=
1926       static_cast<uint32_t>(dex_caches->GetLength())) {
1927     *error_msg = "Dex cache count and dex file count mismatch while trying to initialize from "
1928                  "image";
1929     return false;
1930   }
1931 
1932   for (auto dex_cache : dex_caches.Iterate<mirror::DexCache>()) {
1933     std::string dex_file_location = dex_cache->GetLocation()->ToModifiedUtf8();
1934     std::unique_ptr<const DexFile> dex_file = OpenOatDexFile(oat_file,
1935                                                              dex_file_location.c_str(),
1936                                                              error_msg);
1937     if (dex_file == nullptr) {
1938       return false;
1939     }
1940 
1941     LinearAlloc* linear_alloc = GetOrCreateAllocatorForClassLoader(class_loader.Get());
1942     DCHECK(linear_alloc != nullptr);
1943     DCHECK_EQ(linear_alloc == Runtime::Current()->GetLinearAlloc(), !app_image);
1944     {
1945       // Native fields are all null.  Initialize them and allocate native memory.
1946       WriterMutexLock mu(self, *Locks::dex_lock_);
1947       dex_cache->InitializeNativeFields(dex_file.get(), linear_alloc);
1948     }
1949     if (!app_image) {
1950       // Register dex files, keep track of existing ones that are conflicts.
1951       AppendToBootClassPath(dex_file.get(), dex_cache);
1952     }
1953     out_dex_files->push_back(std::move(dex_file));
1954   }
1955 
1956   if (app_image) {
1957     ScopedObjectAccessUnchecked soa(Thread::Current());
1958     ScopedAssertNoThreadSuspension sants("Checking app image", soa.Self());
1959     if (IsBootClassLoader(soa, image_class_loader.Get())) {
1960       *error_msg = "Unexpected BootClassLoader in app image";
1961       return false;
1962     }
1963   }
1964 
1965   if (kCheckImageObjects) {
1966     if (!app_image) {
1967       ImageChecker::CheckObjects(heap, this);
1968     }
1969   }
1970 
1971   // Set entry point to interpreter if in InterpretOnly mode.
1972   if (!runtime->IsAotCompiler() && runtime->GetInstrumentation()->InterpretOnly()) {
1973     // Set image methods' entry point to interpreter.
1974     header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
1975       if (!method.IsRuntimeMethod()) {
1976         DCHECK(method.GetDeclaringClass() != nullptr);
1977         if (!method.IsNative() && !method.IsResolutionMethod()) {
1978           method.SetEntryPointFromQuickCompiledCodePtrSize(GetQuickToInterpreterBridge(),
1979                                                             image_pointer_size_);
1980         }
1981       }
1982     }, space->Begin(), image_pointer_size_);
1983   }
1984 
1985   if (!runtime->IsAotCompiler()) {
1986     ScopedTrace trace("AppImage:UpdateCodeItemAndNterp");
1987     bool can_use_nterp = interpreter::CanRuntimeUseNterp();
1988     header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
1989       // In the image, the `data` pointer field of the ArtMethod contains the code
1990       // item offset. Change this to the actual pointer to the code item.
1991       if (method.HasCodeItem()) {
1992         const dex::CodeItem* code_item = method.GetDexFile()->GetCodeItem(
1993             reinterpret_cast32<uint32_t>(method.GetDataPtrSize(image_pointer_size_)));
1994         method.SetCodeItem(code_item);
1995       }
1996       // Set image methods' entry point that point to the interpreter bridge to the
1997       // nterp entry point.
1998       if (method.GetEntryPointFromQuickCompiledCode() == nterp_trampoline_) {
1999         if (can_use_nterp) {
2000           DCHECK(!NeedsClinitCheckBeforeCall(&method) ||
2001                  method.GetDeclaringClass()->IsVisiblyInitialized());
2002           method.SetEntryPointFromQuickCompiledCode(interpreter::GetNterpEntryPoint());
2003         } else {
2004           method.SetEntryPointFromQuickCompiledCode(GetQuickToInterpreterBridge());
2005         }
2006       }
2007     }, space->Begin(), image_pointer_size_);
2008   }
2009 
2010   if (runtime->IsVerificationSoftFail()) {
2011     header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
2012       if (!method.IsNative() && method.IsInvokable()) {
2013         method.ClearSkipAccessChecks();
2014       }
2015     }, space->Begin(), image_pointer_size_);
2016   }
2017 
2018   ClassTable* class_table = nullptr;
2019   {
2020     WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
2021     class_table = InsertClassTableForClassLoader(class_loader.Get());
2022   }
2023   // If we have a class table section, read it and use it for verification in
2024   // UpdateAppImageClassLoadersAndDexCaches.
2025   ClassTable::ClassSet temp_set;
2026   const ImageSection& class_table_section = header.GetClassTableSection();
2027   const bool added_class_table = class_table_section.Size() > 0u;
2028   if (added_class_table) {
2029     const uint64_t start_time2 = NanoTime();
2030     size_t read_count = 0;
2031     temp_set = ClassTable::ClassSet(space->Begin() + class_table_section.Offset(),
2032                                     /*make copy*/false,
2033                                     &read_count);
2034     VLOG(image) << "Adding class table classes took " << PrettyDuration(NanoTime() - start_time2);
2035   }
2036   if (app_image) {
2037     AppImageLoadingHelper::Update(this, space, class_loader, dex_caches);
2038 
2039     {
2040       ScopedTrace trace("AppImage:UpdateClassLoaders");
2041       // Update class loader and resolved strings. If added_class_table is false, the resolved
2042       // strings were forwarded in UpdateAppImageClassLoadersAndDexCaches.
2043       ObjPtr<mirror::ClassLoader> loader(class_loader.Get());
2044       for (const ClassTable::TableSlot& root : temp_set) {
2045         // Note: We probably don't need the read barrier unless we copy the app image objects into
2046         // the region space.
2047         ObjPtr<mirror::Class> klass(root.Read());
2048         // Do not update class loader for boot image classes where the app image
2049         // class loader is only the initiating loader but not the defining loader.
2050         // Avoid read barrier since we are comparing against null.
2051         if (klass->GetClassLoader<kDefaultVerifyFlags, kWithoutReadBarrier>() != nullptr) {
2052           klass->SetClassLoader(loader);
2053         }
2054       }
2055     }
2056 
2057     if (kBitstringSubtypeCheckEnabled) {
2058       // Every class in the app image has initially SubtypeCheckInfo in the
2059       // Uninitialized state.
2060       //
2061       // The SubtypeCheck invariants imply that a SubtypeCheckInfo is at least Initialized
2062       // after class initialization is complete. The app image ClassStatus values
2063       // are almost all ClassStatus::Initialized, so being in the
2064       // SubtypeCheckInfo::kUninitialized state would violate that invariant.
2065       //
2066       // Force every app image class's SubtypeCheck to be at least kInitialized.
2067       //
2068       // See also ImageWriter::FixupClass.
2069       ScopedTrace trace("AppImage:RecacluateSubtypeCheckBitstrings");
2070       MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
2071       for (const ClassTable::TableSlot& root : temp_set) {
2072         SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(root.Read());
2073       }
2074     }
2075   }
2076   if (!oat_file->GetBssGcRoots().empty()) {
2077     // Insert oat file to class table for visiting .bss GC roots.
2078     class_table->InsertOatFile(oat_file);
2079   }
2080 
2081   if (added_class_table) {
2082     WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
2083     class_table->AddClassSet(std::move(temp_set));
2084   }
2085 
2086   if (kIsDebugBuild && app_image) {
2087     // This verification needs to happen after the classes have been added to the class loader,
2088     // since it ensures the classes are in the class table.
2089     ScopedTrace trace("AppImage:Verify");
2090     VerifyAppImage(header, class_loader, class_table, space);
2091   }
2092 
2093   VLOG(class_linker) << "Adding image space took " << PrettyDuration(NanoTime() - start_time);
2094   return true;
2095 }
2096 
2097 void ClassLinker::VisitClassRoots(RootVisitor* visitor, VisitRootFlags flags) {
2098   // Acquire tracing_enabled before locking the class linker lock to prevent a lock order
2099   // violation. Since enabling tracing requires the mutator lock, there are no race conditions here.
2100   const bool tracing_enabled = Trace::IsTracingEnabled();
2101   Thread* const self = Thread::Current();
2102   WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
2103   if (kUseReadBarrier) {
2104     // We do not track new roots for CC.
2105     DCHECK_EQ(0, flags & (kVisitRootFlagNewRoots |
2106                           kVisitRootFlagClearRootLog |
2107                           kVisitRootFlagStartLoggingNewRoots |
2108                           kVisitRootFlagStopLoggingNewRoots));
2109   }
2110   if ((flags & kVisitRootFlagAllRoots) != 0) {
2111     // Argument for how root visiting deals with ArtField and ArtMethod roots.
2112     // There are 3 GC cases to handle:
2113     // Non-moving concurrent:
2114     // This case is easy to handle since the reference members of ArtMethod and ArtField are
2115     // held live by the class and class roots.
2116     //
2117     // Moving non-concurrent:
2118     // This case needs to call VisitNativeRoots in case the classes or dex cache arrays move.
2119     // To prevent missing roots, this case needs to ensure that there are no
2120     // suspend points between the point at which we allocate ArtMethod arrays and the point at
2121     // which we place them in a class that is in the class table.
2122     //
2123     // Moving concurrent:
2124     // Need to make sure to not copy ArtMethods without doing read barriers since the roots are
2125     // marked concurrently and we don't hold the classlinker_classes_lock_ when we do the copy.
2126     //
2127     // Use an unbuffered visitor since the class table uses a temporary GcRoot for holding decoded
2128     // ClassTable::TableSlot. The buffered root visiting would access a stale stack location for
2129     // these objects.
2130     UnbufferedRootVisitor root_visitor(visitor, RootInfo(kRootStickyClass));
2131     boot_class_table_->VisitRoots(root_visitor);
2132     // If tracing is enabled, then mark all the class loaders to prevent unloading.
2133     if ((flags & kVisitRootFlagClassLoader) != 0 || tracing_enabled) {
2134       for (const ClassLoaderData& data : class_loaders_) {
2135         GcRoot<mirror::Object> root(GcRoot<mirror::Object>(self->DecodeJObject(data.weak_root)));
2136         root.VisitRoot(visitor, RootInfo(kRootVMInternal));
2137       }
2138     }
2139   } else if (!kUseReadBarrier && (flags & kVisitRootFlagNewRoots) != 0) {
2140     for (auto& root : new_class_roots_) {
2141       ObjPtr<mirror::Class> old_ref = root.Read<kWithoutReadBarrier>();
2142       root.VisitRoot(visitor, RootInfo(kRootStickyClass));
2143       ObjPtr<mirror::Class> new_ref = root.Read<kWithoutReadBarrier>();
2144       // Concurrent moving GC marked new roots through the to-space invariant.
2145       CHECK_EQ(new_ref, old_ref);
2146     }
2147     for (const OatFile* oat_file : new_bss_roots_boot_oat_files_) {
2148       for (GcRoot<mirror::Object>& root : oat_file->GetBssGcRoots()) {
2149         ObjPtr<mirror::Object> old_ref = root.Read<kWithoutReadBarrier>();
2150         if (old_ref != nullptr) {
2151           DCHECK(old_ref->IsClass());
2152           root.VisitRoot(visitor, RootInfo(kRootStickyClass));
2153           ObjPtr<mirror::Object> new_ref = root.Read<kWithoutReadBarrier>();
2154           // Concurrent moving GC marked new roots through the to-space invariant.
2155           CHECK_EQ(new_ref, old_ref);
2156         }
2157       }
2158     }
2159   }
2160   if (!kUseReadBarrier && (flags & kVisitRootFlagClearRootLog) != 0) {
2161     new_class_roots_.clear();
2162     new_bss_roots_boot_oat_files_.clear();
2163   }
2164   if (!kUseReadBarrier && (flags & kVisitRootFlagStartLoggingNewRoots) != 0) {
2165     log_new_roots_ = true;
2166   } else if (!kUseReadBarrier && (flags & kVisitRootFlagStopLoggingNewRoots) != 0) {
2167     log_new_roots_ = false;
2168   }
2169   // We deliberately ignore the class roots in the image since we
2170   // handle image roots by using the MS/CMS rescanning of dirty cards.
2171 }
2172 
2173 // Keep in sync with InitCallback. Anything we visit, we need to
2174 // reinit references to when reinitializing a ClassLinker from a
2175 // mapped image.
2176 void ClassLinker::VisitRoots(RootVisitor* visitor, VisitRootFlags flags) {
2177   class_roots_.VisitRootIfNonNull(visitor, RootInfo(kRootVMInternal));
2178   VisitClassRoots(visitor, flags);
2179   // Instead of visiting the find_array_class_cache_ drop it so that it doesn't prevent class
2180   // unloading if we are marking roots.
2181   DropFindArrayClassCache();
2182 }
2183 
2184 class VisitClassLoaderClassesVisitor : public ClassLoaderVisitor {
2185  public:
2186   explicit VisitClassLoaderClassesVisitor(ClassVisitor* visitor)
2187       : visitor_(visitor),
2188         done_(false) {}
2189 
2190   void Visit(ObjPtr<mirror::ClassLoader> class_loader)
2191       REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
2192     ClassTable* const class_table = class_loader->GetClassTable();
2193     if (!done_ && class_table != nullptr) {
2194       DefiningClassLoaderFilterVisitor visitor(class_loader, visitor_);
2195       if (!class_table->Visit(visitor)) {
2196         // If ClassTable::Visit returns false it means that we don't need to continue.
2197         done_ = true;
2198       }
2199     }
2200   }
2201 
2202  private:
2203   // Class visitor that limits the class visits from a ClassTable to the classes with
2204   // the provided defining class loader. This filter is used to avoid multiple visits
2205   // of the same class which can be recorded for multiple initiating class loaders.
2206   class DefiningClassLoaderFilterVisitor : public ClassVisitor {
2207    public:
2208     DefiningClassLoaderFilterVisitor(ObjPtr<mirror::ClassLoader> defining_class_loader,
2209                                      ClassVisitor* visitor)
2210         : defining_class_loader_(defining_class_loader), visitor_(visitor) { }
2211 
2212     bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES_SHARED(Locks::mutator_lock_) {
2213       if (klass->GetClassLoader() != defining_class_loader_) {
2214         return true;
2215       }
2216       return (*visitor_)(klass);
2217     }
2218 
2219     const ObjPtr<mirror::ClassLoader> defining_class_loader_;
2220     ClassVisitor* const visitor_;
2221   };
2222 
2223   ClassVisitor* const visitor_;
2224   // If done is true then we don't need to do any more visiting.
2225   bool done_;
2226 };
2227 
2228 void ClassLinker::VisitClassesInternal(ClassVisitor* visitor) {
2229   if (boot_class_table_->Visit(*visitor)) {
2230     VisitClassLoaderClassesVisitor loader_visitor(visitor);
2231     VisitClassLoaders(&loader_visitor);
2232   }
2233 }
2234 
2235 void ClassLinker::VisitClasses(ClassVisitor* visitor) {
2236   Thread* const self = Thread::Current();
2237   ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
2238   // Not safe to have thread suspension when we are holding a lock.
2239   if (self != nullptr) {
2240     ScopedAssertNoThreadSuspension nts(__FUNCTION__);
2241     VisitClassesInternal(visitor);
2242   } else {
2243     VisitClassesInternal(visitor);
2244   }
2245 }
2246 
2247 class GetClassesInToVector : public ClassVisitor {
2248  public:
2249   bool operator()(ObjPtr<mirror::Class> klass) override {
2250     classes_.push_back(klass);
2251     return true;
2252   }
2253   std::vector<ObjPtr<mirror::Class>> classes_;
2254 };
2255 
2256 class GetClassInToObjectArray : public ClassVisitor {
2257  public:
2258   explicit GetClassInToObjectArray(mirror::ObjectArray<mirror::Class>* arr)
2259       : arr_(arr), index_(0) {}
2260 
2261   bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES_SHARED(Locks::mutator_lock_) {
2262     ++index_;
2263     if (index_ <= arr_->GetLength()) {
2264       arr_->Set(index_ - 1, klass);
2265       return true;
2266     }
2267     return false;
2268   }
2269 
2270   bool Succeeded() const REQUIRES_SHARED(Locks::mutator_lock_) {
2271     return index_ <= arr_->GetLength();
2272   }
2273 
2274  private:
2275   mirror::ObjectArray<mirror::Class>* const arr_;
2276   int32_t index_;
2277 };
2278 
2279 void ClassLinker::VisitClassesWithoutClassesLock(ClassVisitor* visitor) {
2280   // TODO: it may be possible to avoid secondary storage if we iterate over dex caches. The problem
2281   // is avoiding duplicates.
2282   if (!kMovingClasses) {
2283     ScopedAssertNoThreadSuspension nts(__FUNCTION__);
2284     GetClassesInToVector accumulator;
2285     VisitClasses(&accumulator);
2286     for (ObjPtr<mirror::Class> klass : accumulator.classes_) {
2287       if (!visitor->operator()(klass)) {
2288         return;
2289       }
2290     }
2291   } else {
2292     Thread* const self = Thread::Current();
2293     StackHandleScope<1> hs(self);
2294     auto classes = hs.NewHandle<mirror::ObjectArray<mirror::Class>>(nullptr);
2295     // We size the array assuming classes won't be added to the class table during the visit.
2296     // If this assumption fails we iterate again.
2297     while (true) {
2298       size_t class_table_size;
2299       {
2300         ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
2301         // Add 100 in case new classes get loaded when we are filling in the object array.
2302         class_table_size = NumZygoteClasses() + NumNonZygoteClasses() + 100;
2303       }
2304       ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
2305       classes.Assign(
2306           mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, class_table_size));
2307       CHECK(classes != nullptr);  // OOME.
2308       GetClassInToObjectArray accumulator(classes.Get());
2309       VisitClasses(&accumulator);
2310       if (accumulator.Succeeded()) {
2311         break;
2312       }
2313     }
2314     for (int32_t i = 0; i < classes->GetLength(); ++i) {
2315       // If the class table shrank during creation of the classes array we expect null elements. If
2316       // the class table grew then the loop repeats. If classes are created after the loop has
2317       // finished then we don't visit.
2318       ObjPtr<mirror::Class> klass = classes->Get(i);
2319       if (klass != nullptr && !visitor->operator()(klass)) {
2320         return;
2321       }
2322     }
2323   }
2324 }
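
// The loop above takes a snapshot without holding the classes lock across the
// visit: it sizes an ObjectArray with some slack (the "+ 100") and simply
// retries if classes were loaded concurrently and the array turned out too
// small. A minimal sketch of that pattern, with assumed helper names that are
// not part of this file:
//
//   while (true) {
//     size_t capacity = CurrentClassCount() + kSlack;   // kSlack plays the role of the "+ 100".
//     buffer = AllocateArray(capacity);
//     if (FillFromClassTables(buffer)) {                // Fails when the tables outgrew `capacity`.
//       break;                                          // Snapshot complete; iterate over `buffer`.
//     }
//   }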
2325 
2326 ClassLinker::~ClassLinker() {
2327   Thread* const self = Thread::Current();
2328   for (const ClassLoaderData& data : class_loaders_) {
2329     // CHA unloading analysis is not needed. No negative consequences are expected because
2330     // all the classloaders are deleted at the same time.
2331     DeleteClassLoader(self, data, /*cleanup_cha=*/ false);
2332   }
2333   class_loaders_.clear();
2334   while (!running_visibly_initialized_callbacks_.empty()) {
2335     std::unique_ptr<VisiblyInitializedCallback> callback(
2336         std::addressof(running_visibly_initialized_callbacks_.front()));
2337     running_visibly_initialized_callbacks_.pop_front();
2338   }
2339 }
2340 
2341 void ClassLinker::DeleteClassLoader(Thread* self, const ClassLoaderData& data, bool cleanup_cha) {
2342   Runtime* const runtime = Runtime::Current();
2343   JavaVMExt* const vm = runtime->GetJavaVM();
2344   vm->DeleteWeakGlobalRef(self, data.weak_root);
2345   // Notify the JIT that we need to remove the methods and/or profiling info.
2346   if (runtime->GetJit() != nullptr) {
2347     jit::JitCodeCache* code_cache = runtime->GetJit()->GetCodeCache();
2348     if (code_cache != nullptr) {
2349       // For the JIT case, RemoveMethodsIn removes the CHA dependencies.
2350       code_cache->RemoveMethodsIn(self, *data.allocator);
2351     }
2352   } else if (cha_ != nullptr) {
2353     // If we don't have a JIT, we need to manually remove the CHA dependencies.
2354     cha_->RemoveDependenciesForLinearAlloc(data.allocator);
2355   }
2356   // Cleanup references to single implementation ArtMethods that will be deleted.
2357   if (cleanup_cha) {
2358     CHAOnDeleteUpdateClassVisitor visitor(data.allocator);
2359     data.class_table->Visit<CHAOnDeleteUpdateClassVisitor, kWithoutReadBarrier>(visitor);
2360   }
2361   {
2362     MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
2363     auto end = critical_native_code_with_clinit_check_.end();
2364     for (auto it = critical_native_code_with_clinit_check_.begin(); it != end; ) {
2365       if (data.allocator->ContainsUnsafe(it->first)) {
2366         it = critical_native_code_with_clinit_check_.erase(it);
2367       } else {
2368         ++it;
2369       }
2370     }
2371   }
2372 
2373   delete data.allocator;
2374   delete data.class_table;
2375 }
2376 
2377 ObjPtr<mirror::PointerArray> ClassLinker::AllocPointerArray(Thread* self, size_t length) {
2378   return ObjPtr<mirror::PointerArray>::DownCast(
2379       image_pointer_size_ == PointerSize::k64
2380           ? ObjPtr<mirror::Array>(mirror::LongArray::Alloc(self, length))
2381           : ObjPtr<mirror::Array>(mirror::IntArray::Alloc(self, length)));
2382 }
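
// AllocPointerArray() sizes its backing store by the image pointer width: a
// 64-bit image stores native pointers in a LongArray, a 32-bit image in an
// IntArray. A hypothetical caller (not taken from this file) would look like:
//
//   ObjPtr<mirror::PointerArray> table = AllocPointerArray(self, num_entries);
//   if (table == nullptr) {
//     // Allocation failed; an OutOfMemoryError is pending on `self`.
//   }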
2383 
2384 ObjPtr<mirror::DexCache> ClassLinker::AllocDexCache(Thread* self, const DexFile& dex_file) {
2385   StackHandleScope<1> hs(self);
2386   auto dex_cache(hs.NewHandle(ObjPtr<mirror::DexCache>::DownCast(
2387       GetClassRoot<mirror::DexCache>(this)->AllocObject(self))));
2388   if (dex_cache == nullptr) {
2389     self->AssertPendingOOMException();
2390     return nullptr;
2391   }
2392   // Use InternWeak() so that the location String can be collected when the ClassLoader
2393   // with this DexCache is collected.
2394   ObjPtr<mirror::String> location = intern_table_->InternWeak(dex_file.GetLocation().c_str());
2395   if (location == nullptr) {
2396     self->AssertPendingOOMException();
2397     return nullptr;
2398   }
2399   dex_cache->SetLocation(location);
2400   return dex_cache.Get();
2401 }
2402 
2403 ObjPtr<mirror::DexCache> ClassLinker::AllocAndInitializeDexCache(Thread* self,
2404                                                                  const DexFile& dex_file,
2405                                                                  LinearAlloc* linear_alloc) {
2406   ObjPtr<mirror::DexCache> dex_cache = AllocDexCache(self, dex_file);
2407   if (dex_cache != nullptr) {
2408     WriterMutexLock mu(self, *Locks::dex_lock_);
2409     dex_cache->InitializeNativeFields(&dex_file, linear_alloc);
2410   }
2411   return dex_cache;
2412 }
2413 
2414 template <bool kMovable, typename PreFenceVisitor>
2415 ObjPtr<mirror::Class> ClassLinker::AllocClass(Thread* self,
2416                                               ObjPtr<mirror::Class> java_lang_Class,
2417                                               uint32_t class_size,
2418                                               const PreFenceVisitor& pre_fence_visitor) {
2419   DCHECK_GE(class_size, sizeof(mirror::Class));
2420   gc::Heap* heap = Runtime::Current()->GetHeap();
2421   ObjPtr<mirror::Object> k = (kMovingClasses && kMovable) ?
2422       heap->AllocObject(self, java_lang_Class, class_size, pre_fence_visitor) :
2423       heap->AllocNonMovableObject(self, java_lang_Class, class_size, pre_fence_visitor);
2424   if (UNLIKELY(k == nullptr)) {
2425     self->AssertPendingOOMException();
2426     return nullptr;
2427   }
2428   return k->AsClass();
2429 }
2430 
2431 template <bool kMovable>
2432 ObjPtr<mirror::Class> ClassLinker::AllocClass(Thread* self,
2433                                               ObjPtr<mirror::Class> java_lang_Class,
2434                                               uint32_t class_size) {
2435   mirror::Class::InitializeClassVisitor visitor(class_size);
2436   return AllocClass<kMovable>(self, java_lang_Class, class_size, visitor);
2437 }
2438 
2439 ObjPtr<mirror::Class> ClassLinker::AllocClass(Thread* self, uint32_t class_size) {
2440   return AllocClass(self, GetClassRoot<mirror::Class>(this), class_size);
2441 }
2442 
2443 void ClassLinker::AllocPrimitiveArrayClass(Thread* self,
2444                                            ClassRoot primitive_root,
2445                                            ClassRoot array_root) {
2446   // We make this class non-movable for the unlikely case where it were to be
2447   // moved by a sticky-bit (minor) collection when using the Generational
2448   // Concurrent Copying (CC) collector, potentially creating a stale reference
2449   // in the `klass_` field of one of its instances allocated in the Large-Object
2450   // Space (LOS) -- see the comment about the dirty card scanning logic in
2451   // art::gc::collector::ConcurrentCopying::MarkingPhase.
2452   ObjPtr<mirror::Class> array_class = AllocClass</* kMovable= */ false>(
2453       self, GetClassRoot<mirror::Class>(this), mirror::Array::ClassSize(image_pointer_size_));
2454   ObjPtr<mirror::Class> component_type = GetClassRoot(primitive_root, this);
2455   DCHECK(component_type->IsPrimitive());
2456   array_class->SetComponentType(component_type);
2457   SetClassRoot(array_root, array_class);
2458 }
2459 
2460 void ClassLinker::FinishArrayClassSetup(ObjPtr<mirror::Class> array_class) {
2461   ObjPtr<mirror::Class> java_lang_Object = GetClassRoot<mirror::Object>(this);
2462   array_class->SetSuperClass(java_lang_Object);
2463   array_class->SetVTable(java_lang_Object->GetVTable());
2464   array_class->SetPrimitiveType(Primitive::kPrimNot);
2465   ObjPtr<mirror::Class> component_type = array_class->GetComponentType();
2466   array_class->SetClassFlags(component_type->IsPrimitive()
2467                                  ? mirror::kClassFlagNoReferenceFields
2468                                  : mirror::kClassFlagObjectArray);
2469   array_class->SetClassLoader(component_type->GetClassLoader());
2470   array_class->SetStatusForPrimitiveOrArray(ClassStatus::kLoaded);
2471   array_class->PopulateEmbeddedVTable(image_pointer_size_);
2472   ImTable* object_imt = java_lang_Object->GetImt(image_pointer_size_);
2473   array_class->SetImt(object_imt, image_pointer_size_);
2474   // Skip EnsureSkipAccessChecksMethods(). We can skip the verified status,
2475   // the kAccVerificationAttempted flag is added below, and there are no
2476   // methods that need the kAccSkipAccessChecks flag.
2477   DCHECK_EQ(array_class->NumMethods(), 0u);
2478 
2479   // don't need to set new_class->SetObjectSize(..)
2480   // because Object::SizeOf delegates to Array::SizeOf
2481 
2482   // All arrays have java/lang/Cloneable and java/io/Serializable as
2483   // interfaces.  We need to set that up here, so that stuff like
2484   // "instanceof" works right.
2485 
2486   // Use the single, global copies of "interfaces" and "iftable"
2487   // (remember not to free them for arrays).
2488   {
2489     ObjPtr<mirror::IfTable> array_iftable = GetArrayIfTable();
2490     CHECK(array_iftable != nullptr);
2491     array_class->SetIfTable(array_iftable);
2492   }
2493 
2494   // Inherit access flags from the component type.
2495   int access_flags = component_type->GetAccessFlags();
2496   // Lose any implementation detail flags; in particular, arrays aren't finalizable.
2497   access_flags &= kAccJavaFlagsMask;
2498   // Arrays can't be used as a superclass or interface, so we want to add "abstract final"
2499   // and remove "interface".
2500   access_flags |= kAccAbstract | kAccFinal;
2501   access_flags &= ~kAccInterface;
2502   // Arrays are access-checks-clean and preverified.
2503   access_flags |= kAccVerificationAttempted;
2504 
2505   array_class->SetAccessFlagsDuringLinking(access_flags);
2506 
2507   // Array classes are fully initialized either during single threaded startup,
2508   // or from a pre-fence visitor, so visibly initialized.
2509   array_class->SetStatusForPrimitiveOrArray(ClassStatus::kVisiblyInitialized);
2510 }
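
// Worked example for the access flag computation above, assuming a component
// type declared `public final` (the class itself is hypothetical):
//   after masking with kAccJavaFlagsMask:  kAccPublic | kAccFinal
//   after adding "abstract final":         kAccPublic | kAccFinal | kAccAbstract
//   after adding the preverified marker:   ... | kAccVerificationAttempted
// so the array class ends up public, final, abstract and preverified, and is
// never an interface.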
2511 
2512 void ClassLinker::FinishCoreArrayClassSetup(ClassRoot array_root) {
2513   // Do not hold lock on the array class object, the initialization of
2514   // core array classes is done while the process is still single threaded.
2515   ObjPtr<mirror::Class> array_class = GetClassRoot(array_root, this);
2516   FinishArrayClassSetup(array_class);
2517 
2518   std::string temp;
2519   const char* descriptor = array_class->GetDescriptor(&temp);
2520   size_t hash = ComputeModifiedUtf8Hash(descriptor);
2521   ObjPtr<mirror::Class> existing = InsertClass(descriptor, array_class, hash);
2522   CHECK(existing == nullptr);
2523 }
2524 
2525 ObjPtr<mirror::ObjectArray<mirror::StackTraceElement>> ClassLinker::AllocStackTraceElementArray(
2526     Thread* self,
2527     size_t length) {
2528   return mirror::ObjectArray<mirror::StackTraceElement>::Alloc(
2529       self, GetClassRoot<mirror::ObjectArray<mirror::StackTraceElement>>(this), length);
2530 }
2531 
2532 ObjPtr<mirror::Class> ClassLinker::EnsureResolved(Thread* self,
2533                                                   const char* descriptor,
2534                                                   ObjPtr<mirror::Class> klass) {
2535   DCHECK(klass != nullptr);
2536   if (kIsDebugBuild) {
2537     StackHandleScope<1> hs(self);
2538     HandleWrapperObjPtr<mirror::Class> h = hs.NewHandleWrapper(&klass);
2539     Thread::PoisonObjectPointersIfDebug();
2540   }
2541 
2542   // For temporary classes we must wait for them to be retired.
2543   if (init_done_ && klass->IsTemp()) {
2544     CHECK(!klass->IsResolved());
2545     if (klass->IsErroneousUnresolved()) {
2546       ThrowEarlierClassFailure(klass);
2547       return nullptr;
2548     }
2549     StackHandleScope<1> hs(self);
2550     Handle<mirror::Class> h_class(hs.NewHandle(klass));
2551     ObjectLock<mirror::Class> lock(self, h_class);
2552     // Loop and wait for the resolving thread to retire this class.
2553     while (!h_class->IsRetired() && !h_class->IsErroneousUnresolved()) {
2554       lock.WaitIgnoringInterrupts();
2555     }
2556     if (h_class->IsErroneousUnresolved()) {
2557       ThrowEarlierClassFailure(h_class.Get());
2558       return nullptr;
2559     }
2560     CHECK(h_class->IsRetired());
2561     // Get the updated class from class table.
2562     klass = LookupClass(self, descriptor, h_class.Get()->GetClassLoader());
2563   }
2564 
2565   // Wait for the class if it has not already been linked.
2566   size_t index = 0;
2567   // Maximum number of yield iterations until we start sleeping.
2568   static const size_t kNumYieldIterations = 1000;
2569   // How long each sleep is in us.
2570   static const size_t kSleepDurationUS = 1000;  // 1 ms.
2571   while (!klass->IsResolved() && !klass->IsErroneousUnresolved()) {
2572     StackHandleScope<1> hs(self);
2573     HandleWrapperObjPtr<mirror::Class> h_class(hs.NewHandleWrapper(&klass));
2574     {
2575       ObjectTryLock<mirror::Class> lock(self, h_class);
2576       // Can not use a monitor wait here since it may block when returning and deadlock if another
2577       // thread has locked klass.
2578       if (lock.Acquired()) {
2579         // Check for circular dependencies between classes, the lock is required for SetStatus.
2580         if (!h_class->IsResolved() && h_class->GetClinitThreadId() == self->GetTid()) {
2581           ThrowClassCircularityError(h_class.Get());
2582           mirror::Class::SetStatus(h_class, ClassStatus::kErrorUnresolved, self);
2583           return nullptr;
2584         }
2585       }
2586     }
2587     {
2588       // Handle wrapper deals with klass moving.
2589       ScopedThreadSuspension sts(self, kSuspended);
2590       if (index < kNumYieldIterations) {
2591         sched_yield();
2592       } else {
2593         usleep(kSleepDurationUS);
2594       }
2595     }
2596     ++index;
2597   }
2598 
2599   if (klass->IsErroneousUnresolved()) {
2600     ThrowEarlierClassFailure(klass);
2601     return nullptr;
2602   }
2603   // Return the loaded class.  No exceptions should be pending.
2604   CHECK(klass->IsResolved()) << klass->PrettyClass();
2605   self->AssertNoPendingException();
2606   return klass;
2607 }
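
// Back-off note for the wait loop above: the first kNumYieldIterations (1000)
// passes only call sched_yield(), which is cheap while resolution by another
// thread is likely imminent; after that each pass sleeps kSleepDurationUS
// (1 ms), capping CPU use when resolution takes a long time.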
2608 
2609 using ClassPathEntry = std::pair<const DexFile*, const dex::ClassDef*>;
2610 
2611 // Search a collection of DexFiles for a descriptor
2612 ClassPathEntry FindInClassPath(const char* descriptor,
2613                                size_t hash, const std::vector<const DexFile*>& class_path) {
2614   for (const DexFile* dex_file : class_path) {
2615     DCHECK(dex_file != nullptr);
2616     const dex::ClassDef* dex_class_def = OatDexFile::FindClassDef(*dex_file, descriptor, hash);
2617     if (dex_class_def != nullptr) {
2618       return ClassPathEntry(dex_file, dex_class_def);
2619     }
2620   }
2621   return ClassPathEntry(nullptr, nullptr);
2622 }
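
// Hypothetical usage of the helper above (the descriptor is chosen for
// illustration, not taken from this file):
//
//   const char* descriptor = "Ljava/lang/String;";
//   size_t hash = ComputeModifiedUtf8Hash(descriptor);
//   ClassPathEntry entry = FindInClassPath(descriptor, hash, boot_class_path_);
//   if (entry.second != nullptr) {
//     // entry.first is the defining DexFile, entry.second its ClassDef.
//   }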
2623 
2624 bool ClassLinker::FindClassInSharedLibraries(ScopedObjectAccessAlreadyRunnable& soa,
2625                                              Thread* self,
2626                                              const char* descriptor,
2627                                              size_t hash,
2628                                              Handle<mirror::ClassLoader> class_loader,
2629                                              /*out*/ ObjPtr<mirror::Class>* result) {
2630   ArtField* field =
2631       jni::DecodeArtField(WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoaders);
2632   ObjPtr<mirror::Object> raw_shared_libraries = field->GetObject(class_loader.Get());
2633   if (raw_shared_libraries == nullptr) {
2634     return true;
2635   }
2636 
2637   StackHandleScope<2> hs(self);
2638   Handle<mirror::ObjectArray<mirror::ClassLoader>> shared_libraries(
2639       hs.NewHandle(raw_shared_libraries->AsObjectArray<mirror::ClassLoader>()));
2640   MutableHandle<mirror::ClassLoader> temp_loader = hs.NewHandle<mirror::ClassLoader>(nullptr);
2641   for (auto loader : shared_libraries.Iterate<mirror::ClassLoader>()) {
2642     temp_loader.Assign(loader);
2643     if (!FindClassInBaseDexClassLoader(soa, self, descriptor, hash, temp_loader, result)) {
2644       return false;  // One of the shared libraries is not supported.
2645     }
2646     if (*result != nullptr) {
2647       return true;  // Found the class up the chain.
2648     }
2649   }
2650   return true;
2651 }
2652 
2653 bool ClassLinker::FindClassInBaseDexClassLoader(ScopedObjectAccessAlreadyRunnable& soa,
2654                                                 Thread* self,
2655                                                 const char* descriptor,
2656                                                 size_t hash,
2657                                                 Handle<mirror::ClassLoader> class_loader,
2658                                                 /*out*/ ObjPtr<mirror::Class>* result) {
2659   // Termination case: boot class loader.
2660   if (IsBootClassLoader(soa, class_loader.Get())) {
2661     *result = FindClassInBootClassLoaderClassPath(self, descriptor, hash);
2662     return true;
2663   }
2664 
2665   if (IsPathOrDexClassLoader(soa, class_loader) || IsInMemoryDexClassLoader(soa, class_loader)) {
2666     // For regular path or dex class loader the search order is:
2667     //    - parent
2668     //    - shared libraries
2669     //    - class loader dex files
2670 
2671     // Handles as RegisterDexFile may allocate dex caches (and cause thread suspension).
2672     StackHandleScope<1> hs(self);
2673     Handle<mirror::ClassLoader> h_parent(hs.NewHandle(class_loader->GetParent()));
2674     if (!FindClassInBaseDexClassLoader(soa, self, descriptor, hash, h_parent, result)) {
2675       return false;  // One of the parents is not supported.
2676     }
2677     if (*result != nullptr) {
2678       return true;  // Found the class up the chain.
2679     }
2680 
2681     if (!FindClassInSharedLibraries(soa, self, descriptor, hash, class_loader, result)) {
2682       return false;  // One of the shared library loaders is not supported.
2683     }
2684     if (*result != nullptr) {
2685       return true;  // Found the class in a shared library.
2686     }
2687 
2688     // Search the current class loader classpath.
2689     *result = FindClassInBaseDexClassLoaderClassPath(soa, descriptor, hash, class_loader);
2690     return !soa.Self()->IsExceptionPending();
2691   }
2692 
2693   if (IsDelegateLastClassLoader(soa, class_loader)) {
2694     // For delegate last, the search order is:
2695     //    - boot class path
2696     //    - shared libraries
2697     //    - class loader dex files
2698     //    - parent
2699     *result = FindClassInBootClassLoaderClassPath(self, descriptor, hash);
2700     if (*result != nullptr) {
2701       return true;  // The class is part of the boot class path.
2702     }
2703     if (self->IsExceptionPending()) {
2704       // Pending exception means there was an error other than ClassNotFound that must be returned
2705       // to the caller.
2706       return false;
2707     }
2708 
2709     if (!FindClassInSharedLibraries(soa, self, descriptor, hash, class_loader, result)) {
2710       return false;  // One of the shared library loaders is not supported.
2711     }
2712     if (*result != nullptr) {
2713       return true;  // Found the class in a shared library.
2714     }
2715 
2716     *result = FindClassInBaseDexClassLoaderClassPath(soa, descriptor, hash, class_loader);
2717     if (*result != nullptr) {
2718       return true;  // Found the class in the current class loader
2719     }
2720     if (self->IsExceptionPending()) {
2721       // Pending exception means there was an error other than ClassNotFound that must be returned
2722       // to the caller.
2723       return false;
2724     }
2725 
2726     // Handles as RegisterDexFile may allocate dex caches (and cause thread suspension).
2727     StackHandleScope<1> hs(self);
2728     Handle<mirror::ClassLoader> h_parent(hs.NewHandle(class_loader->GetParent()));
2729     return FindClassInBaseDexClassLoader(soa, self, descriptor, hash, h_parent, result);
2730   }
2731 
2732   // Unsupported class loader.
2733   *result = nullptr;
2734   return false;
2735 }
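
// Recap of the delegation implemented above: PathClassLoader, DexClassLoader
// and InMemoryDexClassLoader search parent -> shared libraries -> own dex
// files; DelegateLastClassLoader searches boot class path -> shared libraries
// -> own dex files -> parent; any other loader type leaves *result null and
// returns false so the caller falls back to calling loadClass() in Java.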
2736 
2737 namespace {
2738 
2739 // Matches exceptions caught in DexFile.defineClass.
2740 ALWAYS_INLINE bool MatchesDexFileCaughtExceptions(ObjPtr<mirror::Throwable> throwable,
2741                                                   ClassLinker* class_linker)
2742     REQUIRES_SHARED(Locks::mutator_lock_) {
2743   return
2744       // ClassNotFoundException.
2745       throwable->InstanceOf(GetClassRoot(ClassRoot::kJavaLangClassNotFoundException,
2746                                          class_linker))
2747       ||
2748       // NoClassDefFoundError. TODO: Reconsider this. b/130746382.
2749       throwable->InstanceOf(Runtime::Current()->GetPreAllocatedNoClassDefFoundError()->GetClass());
2750 }
2751 
2752 // Clear exceptions caught in DexFile.defineClass.
2753 ALWAYS_INLINE void FilterDexFileCaughtExceptions(Thread* self, ClassLinker* class_linker)
2754     REQUIRES_SHARED(Locks::mutator_lock_) {
2755   if (MatchesDexFileCaughtExceptions(self->GetException(), class_linker)) {
2756     self->ClearException();
2757   }
2758 }
2759 
2760 }  // namespace
2761 
2762 // Finds the class in the boot class loader.
2763 // If the class is found the method returns the resolved class. Otherwise it returns null.
2764 ObjPtr<mirror::Class> ClassLinker::FindClassInBootClassLoaderClassPath(Thread* self,
2765                                                                        const char* descriptor,
2766                                                                        size_t hash) {
2767   ObjPtr<mirror::Class> result = nullptr;
2768   ClassPathEntry pair = FindInClassPath(descriptor, hash, boot_class_path_);
2769   if (pair.second != nullptr) {
2770     ObjPtr<mirror::Class> klass = LookupClass(self, descriptor, hash, nullptr);
2771     if (klass != nullptr) {
2772       result = EnsureResolved(self, descriptor, klass);
2773     } else {
2774       result = DefineClass(self,
2775                            descriptor,
2776                            hash,
2777                            ScopedNullHandle<mirror::ClassLoader>(),
2778                            *pair.first,
2779                            *pair.second);
2780     }
2781     if (result == nullptr) {
2782       CHECK(self->IsExceptionPending()) << descriptor;
2783       FilterDexFileCaughtExceptions(self, this);
2784     }
2785   }
2786   return result;
2787 }
2788 
2789 ObjPtr<mirror::Class> ClassLinker::FindClassInBaseDexClassLoaderClassPath(
2790     ScopedObjectAccessAlreadyRunnable& soa,
2791     const char* descriptor,
2792     size_t hash,
2793     Handle<mirror::ClassLoader> class_loader) {
2794   DCHECK(IsPathOrDexClassLoader(soa, class_loader) ||
2795          IsInMemoryDexClassLoader(soa, class_loader) ||
2796          IsDelegateLastClassLoader(soa, class_loader))
2797       << "Unexpected class loader for descriptor " << descriptor;
2798 
2799   const DexFile* dex_file = nullptr;
2800   const dex::ClassDef* class_def = nullptr;
2801   ObjPtr<mirror::Class> ret;
2802   auto find_class_def = [&](const DexFile* cp_dex_file) REQUIRES_SHARED(Locks::mutator_lock_) {
2803     const dex::ClassDef* cp_class_def = OatDexFile::FindClassDef(*cp_dex_file, descriptor, hash);
2804     if (cp_class_def != nullptr) {
2805       dex_file = cp_dex_file;
2806       class_def = cp_class_def;
2807       return false;  // Found a class definition, stop visit.
2808     }
2809     return true;  // Continue with the next DexFile.
2810   };
2811   VisitClassLoaderDexFiles(soa, class_loader, find_class_def);
2812 
2813   ObjPtr<mirror::Class> klass = nullptr;
2814   if (class_def != nullptr) {
2815     klass = DefineClass(soa.Self(), descriptor, hash, class_loader, *dex_file, *class_def);
2816     if (UNLIKELY(klass == nullptr)) {
2817       CHECK(soa.Self()->IsExceptionPending()) << descriptor;
2818       FilterDexFileCaughtExceptions(soa.Self(), this);
2819     } else {
2820       DCHECK(!soa.Self()->IsExceptionPending());
2821     }
2822   }
2823   return klass;
2824 }
2825 
2826 ObjPtr<mirror::Class> ClassLinker::FindClass(Thread* self,
2827                                              const char* descriptor,
2828                                              Handle<mirror::ClassLoader> class_loader) {
2829   DCHECK_NE(*descriptor, '\0') << "descriptor is empty string";
2830   DCHECK(self != nullptr);
2831   self->AssertNoPendingException();
2832   self->PoisonObjectPointers();  // For DefineClass, CreateArrayClass, etc...
2833   if (descriptor[1] == '\0') {
2834     // Only the descriptors of primitive types should be 1 character long; also avoid class lookup
2835     // for primitive classes that aren't backed by dex files.
2836     return FindPrimitiveClass(descriptor[0]);
2837   }
2838   const size_t hash = ComputeModifiedUtf8Hash(descriptor);
2839   // Find the class in the loaded classes table.
2840   ObjPtr<mirror::Class> klass = LookupClass(self, descriptor, hash, class_loader.Get());
2841   if (klass != nullptr) {
2842     return EnsureResolved(self, descriptor, klass);
2843   }
2844   // Class is not yet loaded.
2845   if (descriptor[0] != '[' && class_loader == nullptr) {
2846     // Non-array class and the boot class loader, search the boot class path.
2847     ClassPathEntry pair = FindInClassPath(descriptor, hash, boot_class_path_);
2848     if (pair.second != nullptr) {
2849       return DefineClass(self,
2850                          descriptor,
2851                          hash,
2852                          ScopedNullHandle<mirror::ClassLoader>(),
2853                          *pair.first,
2854                          *pair.second);
2855     } else {
2856       // The boot class loader is searched ahead of the application class loader, failures are
2857       // expected and will be wrapped in a ClassNotFoundException. Use the pre-allocated error to
2858       // trigger the chaining with a proper stack trace.
2859       ObjPtr<mirror::Throwable> pre_allocated =
2860           Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
2861       self->SetException(pre_allocated);
2862       return nullptr;
2863     }
2864   }
2865   ObjPtr<mirror::Class> result_ptr;
2866   bool descriptor_equals;
2867   if (descriptor[0] == '[') {
2868     result_ptr = CreateArrayClass(self, descriptor, hash, class_loader);
2869     DCHECK_EQ(result_ptr == nullptr, self->IsExceptionPending());
2870     DCHECK(result_ptr == nullptr || result_ptr->DescriptorEquals(descriptor));
2871     descriptor_equals = true;
2872   } else {
2873     ScopedObjectAccessUnchecked soa(self);
2874     bool known_hierarchy =
2875         FindClassInBaseDexClassLoader(soa, self, descriptor, hash, class_loader, &result_ptr);
2876     if (result_ptr != nullptr) {
2877       // The chain was understood and we found the class. We still need to add the class to
2878       // the class table to protect from racy programs that can try and redefine the path list
2879       // which would change the Class<?> returned for subsequent evaluation of const-class.
2880       DCHECK(known_hierarchy);
2881       DCHECK(result_ptr->DescriptorEquals(descriptor));
2882       descriptor_equals = true;
2883     } else if (!self->IsExceptionPending()) {
2884       // Either the chain wasn't understood or the class wasn't found.
2885       // If there is a pending exception we didn't clear, it is not a ClassNotFoundException and
2886       // we should return it instead of silently clearing and retrying.
2887       //
2888       // If the chain was understood but we did not find the class, let the Java-side
2889       // rediscover all this and throw the exception with the right stack trace. Note that
2890       // the Java-side could still succeed for racy programs if another thread is actively
2891       // modifying the class loader's path list.
2892 
2893       // The runtime is not allowed to call into java from a runtime-thread so just abort.
2894       if (self->IsRuntimeThread()) {
2895         // Oops, we can't call into java so we can't run actual class-loader code.
2896         // This is true for e.g. for the compiler (jit or aot).
2897         ObjPtr<mirror::Throwable> pre_allocated =
2898             Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
2899         self->SetException(pre_allocated);
2900         return nullptr;
2901       }
2902 
2903       // Inlined DescriptorToDot(descriptor) with extra validation.
2904       //
2905       // Throw NoClassDefFoundError early rather than potentially load a class only to fail
2906       // the DescriptorEquals() check below and give a confusing error message. For example,
2907       // when native code erroneously calls JNI GetFieldId() with signature "java/lang/String"
2908       // instead of "Ljava/lang/String;", the message below using the "dot" names would be
2909       // "class loader [...] returned class java.lang.String instead of java.lang.String".
2910       size_t descriptor_length = strlen(descriptor);
2911       if (UNLIKELY(descriptor[0] != 'L') ||
2912           UNLIKELY(descriptor[descriptor_length - 1] != ';') ||
2913           UNLIKELY(memchr(descriptor + 1, '.', descriptor_length - 2) != nullptr)) {
2914         ThrowNoClassDefFoundError("Invalid descriptor: %s.", descriptor);
2915         return nullptr;
2916       }
2917 
2918       std::string class_name_string(descriptor + 1, descriptor_length - 2);
2919       std::replace(class_name_string.begin(), class_name_string.end(), '/', '.');
2920       if (known_hierarchy &&
2921           fast_class_not_found_exceptions_ &&
2922           !Runtime::Current()->IsJavaDebuggable()) {
2923         // For known hierarchy, we know that the class is going to throw an exception. If we aren't
2924         // debuggable, optimize this path by throwing directly here without going back to Java
2925         // language. This reduces how many ClassNotFoundExceptions happen.
2926         self->ThrowNewExceptionF("Ljava/lang/ClassNotFoundException;",
2927                                  "%s",
2928                                  class_name_string.c_str());
2929       } else {
2930         ScopedLocalRef<jobject> class_loader_object(
2931             soa.Env(), soa.AddLocalReference<jobject>(class_loader.Get()));
2932         ScopedLocalRef<jobject> result(soa.Env(), nullptr);
2933         {
2934           ScopedThreadStateChange tsc(self, kNative);
2935           ScopedLocalRef<jobject> class_name_object(
2936               soa.Env(), soa.Env()->NewStringUTF(class_name_string.c_str()));
2937           if (class_name_object.get() == nullptr) {
2938             DCHECK(self->IsExceptionPending());  // OOME.
2939             return nullptr;
2940           }
2941           CHECK(class_loader_object.get() != nullptr);
2942           result.reset(soa.Env()->CallObjectMethod(class_loader_object.get(),
2943                                                    WellKnownClasses::java_lang_ClassLoader_loadClass,
2944                                                    class_name_object.get()));
2945         }
2946         if (result.get() == nullptr && !self->IsExceptionPending()) {
2947           // broken loader - throw NPE to be compatible with Dalvik
2948           ThrowNullPointerException(StringPrintf("ClassLoader.loadClass returned null for %s",
2949                                                  class_name_string.c_str()).c_str());
2950           return nullptr;
2951         }
2952         result_ptr = soa.Decode<mirror::Class>(result.get());
2953         // Check the name of the returned class.
2954         descriptor_equals = (result_ptr != nullptr) && result_ptr->DescriptorEquals(descriptor);
2955       }
2956     } else {
2957       DCHECK(!MatchesDexFileCaughtExceptions(self->GetException(), this));
2958     }
2959   }
2960 
2961   if (self->IsExceptionPending()) {
2962     // If the ClassLoader threw or array class allocation failed, pass that exception up.
2963     // However, to comply with the RI behavior, first check if another thread succeeded.
2964     result_ptr = LookupClass(self, descriptor, hash, class_loader.Get());
2965     if (result_ptr != nullptr && !result_ptr->IsErroneous()) {
2966       self->ClearException();
2967       return EnsureResolved(self, descriptor, result_ptr);
2968     }
2969     return nullptr;
2970   }
2971 
2972   // Try to insert the class to the class table, checking for mismatch.
2973   ObjPtr<mirror::Class> old;
2974   {
2975     WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
2976     ClassTable* const class_table = InsertClassTableForClassLoader(class_loader.Get());
2977     old = class_table->Lookup(descriptor, hash);
2978     if (old == nullptr) {
2979       old = result_ptr;  // For the comparison below, after releasing the lock.
2980       if (descriptor_equals) {
2981         class_table->InsertWithHash(result_ptr, hash);
2982         WriteBarrier::ForEveryFieldWrite(class_loader.Get());
2983       }  // else throw below, after releasing the lock.
2984     }
2985   }
2986   if (UNLIKELY(old != result_ptr)) {
2987     // Return `old` (even if `!descriptor_equals`) to mimic the RI behavior for parallel
2988     // capable class loaders.  (All class loaders are considered parallel capable on Android.)
2989     ObjPtr<mirror::Class> loader_class = class_loader->GetClass();
2990     const char* loader_class_name =
2991         loader_class->GetDexFile().StringByTypeIdx(loader_class->GetDexTypeIndex());
2992     LOG(WARNING) << "Initiating class loader of type " << DescriptorToDot(loader_class_name)
2993         << " is not well-behaved; it returned a different Class for racing loadClass(\""
2994         << DescriptorToDot(descriptor) << "\").";
2995     return EnsureResolved(self, descriptor, old);
2996   }
2997   if (UNLIKELY(!descriptor_equals)) {
2998     std::string result_storage;
2999     const char* result_name = result_ptr->GetDescriptor(&result_storage);
3000     std::string loader_storage;
3001     const char* loader_class_name = class_loader->GetClass()->GetDescriptor(&loader_storage);
3002     ThrowNoClassDefFoundError(
3003         "Initiating class loader of type %s returned class %s instead of %s.",
3004         DescriptorToDot(loader_class_name).c_str(),
3005         DescriptorToDot(result_name).c_str(),
3006         DescriptorToDot(descriptor).c_str());
3007     return nullptr;
3008   }
3009   // Success.
3010   return result_ptr;
3011 }
3012 
3013 // Helper for maintaining DefineClass counting. We need to notify callbacks when we start/end a
3014 // define-class and how many recursive DefineClasses we are at in order to allow for doing things
3015 // like pausing class definition.
3016 struct ScopedDefiningClass {
3017  public:
3018   explicit ScopedDefiningClass(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_)
3019       : self_(self), returned_(false) {
3020     Locks::mutator_lock_->AssertSharedHeld(self_);
3021     Runtime::Current()->GetRuntimeCallbacks()->BeginDefineClass();
3022     self_->IncrDefineClassCount();
3023   }
3024   ~ScopedDefiningClass() REQUIRES_SHARED(Locks::mutator_lock_) {
3025     Locks::mutator_lock_->AssertSharedHeld(self_);
3026     CHECK(returned_);
3027   }
3028 
3029   ObjPtr<mirror::Class> Finish(Handle<mirror::Class> h_klass)
3030       REQUIRES_SHARED(Locks::mutator_lock_) {
3031     CHECK(!returned_);
3032     self_->DecrDefineClassCount();
3033     Runtime::Current()->GetRuntimeCallbacks()->EndDefineClass();
3034     Thread::PoisonObjectPointersIfDebug();
3035     returned_ = true;
3036     return h_klass.Get();
3037   }
3038 
3039   ObjPtr<mirror::Class> Finish(ObjPtr<mirror::Class> klass)
3040       REQUIRES_SHARED(Locks::mutator_lock_) {
3041     StackHandleScope<1> hs(self_);
3042     Handle<mirror::Class> h_klass(hs.NewHandle(klass));
3043     return Finish(h_klass);
3044   }
3045 
3046   ObjPtr<mirror::Class> Finish(nullptr_t np ATTRIBUTE_UNUSED)
3047       REQUIRES_SHARED(Locks::mutator_lock_) {
3048     ScopedNullHandle<mirror::Class> snh;
3049     return Finish(snh);
3050   }
3051 
3052  private:
3053   Thread* self_;
3054   bool returned_;
3055 };
3056 
3057 ObjPtr<mirror::Class> ClassLinker::DefineClass(Thread* self,
3058                                                const char* descriptor,
3059                                                size_t hash,
3060                                                Handle<mirror::ClassLoader> class_loader,
3061                                                const DexFile& dex_file,
3062                                                const dex::ClassDef& dex_class_def) {
3063   ScopedDefiningClass sdc(self);
3064   StackHandleScope<3> hs(self);
3065   metrics::AutoTimer timer{GetMetrics()->ClassLoadingTotalTime()};
3066   auto klass = hs.NewHandle<mirror::Class>(nullptr);
3067 
3068   // Load the class from the dex file.
3069   if (UNLIKELY(!init_done_)) {
3070     // finish up init of hand crafted class_roots_
3071     if (strcmp(descriptor, "Ljava/lang/Object;") == 0) {
3072       klass.Assign(GetClassRoot<mirror::Object>(this));
3073     } else if (strcmp(descriptor, "Ljava/lang/Class;") == 0) {
3074       klass.Assign(GetClassRoot<mirror::Class>(this));
3075     } else if (strcmp(descriptor, "Ljava/lang/String;") == 0) {
3076       klass.Assign(GetClassRoot<mirror::String>(this));
3077     } else if (strcmp(descriptor, "Ljava/lang/ref/Reference;") == 0) {
3078       klass.Assign(GetClassRoot<mirror::Reference>(this));
3079     } else if (strcmp(descriptor, "Ljava/lang/DexCache;") == 0) {
3080       klass.Assign(GetClassRoot<mirror::DexCache>(this));
3081     } else if (strcmp(descriptor, "Ldalvik/system/ClassExt;") == 0) {
3082       klass.Assign(GetClassRoot<mirror::ClassExt>(this));
3083     }
3084   }
3085 
3086   // For AOT-compilation of an app, we may use a shortened boot class path that excludes
3087   // some runtime modules. Prevent definition of classes in app class loader that could clash
3088   // with these modules as these classes could be resolved differently during execution.
3089   if (class_loader != nullptr &&
3090       Runtime::Current()->IsAotCompiler() &&
3091       IsUpdatableBootClassPathDescriptor(descriptor)) {
3092     ObjPtr<mirror::Throwable> pre_allocated =
3093         Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
3094     self->SetException(pre_allocated);
3095     return sdc.Finish(nullptr);
3096   }
3097 
3098   // For AOT-compilation of an app, we may use only a public SDK to resolve symbols. If the SDK
3099   // checks are configured (a non null SdkChecker) and the descriptor is not in the provided
3100   // public class path then we prevent the definition of the class.
3101   //
3102   // NOTE that we only do the checks for the boot classpath APIs. Anything else, like the app
3103   // classpath is not checked.
3104   if (class_loader == nullptr &&
3105       Runtime::Current()->IsAotCompiler() &&
3106       DenyAccessBasedOnPublicSdk(descriptor)) {
3107     ObjPtr<mirror::Throwable> pre_allocated =
3108         Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
3109     self->SetException(pre_allocated);
3110     return sdc.Finish(nullptr);
3111   }
3112 
3113   // This is to prevent the calls to ClassLoad and ClassPrepare which can cause java/user-supplied
3114   // code to be executed. We put it up here so we can avoid all the allocations associated with
3115   // creating the class. This can happen with (eg) jit threads.
3116   if (!self->CanLoadClasses()) {
3117     // Make sure we don't try to load anything, potentially causing an infinite loop.
3118     ObjPtr<mirror::Throwable> pre_allocated =
3119         Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
3120     self->SetException(pre_allocated);
3121     return sdc.Finish(nullptr);
3122   }
3123 
3124   if (klass == nullptr) {
3125     // Allocate a class with the status of not ready.
3126     // Interface object should get the right size here. Regular class will
3127     // figure out the right size later and be replaced with one of the right
3128     // size when the class becomes resolved.
3129     if (CanAllocClass()) {
3130       klass.Assign(AllocClass(self, SizeOfClassWithoutEmbeddedTables(dex_file, dex_class_def)));
3131     } else {
3132       return sdc.Finish(nullptr);
3133     }
3134   }
3135   if (UNLIKELY(klass == nullptr)) {
3136     self->AssertPendingOOMException();
3137     return sdc.Finish(nullptr);
3138   }
3139   // Get the real dex file. This will return the input if there aren't any callbacks or they do
3140   // nothing.
3141   DexFile const* new_dex_file = nullptr;
3142   dex::ClassDef const* new_class_def = nullptr;
3143   // TODO We should ideally figure out some way to move this after we get a lock on the klass so it
3144   // will only be called once.
3145   Runtime::Current()->GetRuntimeCallbacks()->ClassPreDefine(descriptor,
3146                                                             klass,
3147                                                             class_loader,
3148                                                             dex_file,
3149                                                             dex_class_def,
3150                                                             &new_dex_file,
3151                                                             &new_class_def);
3152   // Check to see if an exception happened during runtime callbacks. Return if so.
3153   if (self->IsExceptionPending()) {
3154     return sdc.Finish(nullptr);
3155   }
3156   ObjPtr<mirror::DexCache> dex_cache = RegisterDexFile(*new_dex_file, class_loader.Get());
3157   if (dex_cache == nullptr) {
3158     self->AssertPendingException();
3159     return sdc.Finish(nullptr);
3160   }
3161   klass->SetDexCache(dex_cache);
3162   SetupClass(*new_dex_file, *new_class_def, klass, class_loader.Get());
3163 
3164   // Mark the string class by setting its access flag.
3165   if (UNLIKELY(!init_done_)) {
3166     if (strcmp(descriptor, "Ljava/lang/String;") == 0) {
3167       klass->SetStringClass();
3168     }
3169   }
3170 
3171   ObjectLock<mirror::Class> lock(self, klass);
3172   klass->SetClinitThreadId(self->GetTid());
3173   // Make sure we have a valid empty iftable even if there are errors.
3174   klass->SetIfTable(GetClassRoot<mirror::Object>(this)->GetIfTable());
3175 
3176   // Add the newly loaded class to the loaded classes table.
3177   ObjPtr<mirror::Class> existing = InsertClass(descriptor, klass.Get(), hash);
3178   if (existing != nullptr) {
3179     // We failed to insert because we raced with another thread. Calling EnsureResolved may cause
3180     // this thread to block.
3181     return sdc.Finish(EnsureResolved(self, descriptor, existing));
3182   }
3183 
3184   // Load the fields and other things after we are inserted in the table. This is so that we don't
3185   // end up allocating unfree-able linear alloc resources and then lose the race condition. The
3186   // other reason is that the field roots are only visited from the class table. So we need to be
3187   // inserted before we allocate / fill in these fields.
3188   LoadClass(self, *new_dex_file, *new_class_def, klass);
3189   if (self->IsExceptionPending()) {
3190     VLOG(class_linker) << self->GetException()->Dump();
3191     // An exception occurred during load, set status to erroneous while holding klass' lock in case
3192     // notification is necessary.
3193     if (!klass->IsErroneous()) {
3194       mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
3195     }
3196     return sdc.Finish(nullptr);
3197   }
3198 
3199   // Finish loading (if necessary) by finding parents
3200   CHECK(!klass->IsLoaded());
3201   if (!LoadSuperAndInterfaces(klass, *new_dex_file)) {
3202     // Loading failed.
3203     if (!klass->IsErroneous()) {
3204       mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
3205     }
3206     return sdc.Finish(nullptr);
3207   }
3208   CHECK(klass->IsLoaded());
3209 
3210   // At this point the class is loaded. Publish a ClassLoad event.
3211   // Note: this may be a temporary class. It is a listener's responsibility to handle this.
3212   Runtime::Current()->GetRuntimeCallbacks()->ClassLoad(klass);
3213 
3214   // Link the class (if necessary)
3215   CHECK(!klass->IsResolved());
3216   // TODO: Use fast jobjects?
3217   auto interfaces = hs.NewHandle<mirror::ObjectArray<mirror::Class>>(nullptr);
3218 
3219   MutableHandle<mirror::Class> h_new_class = hs.NewHandle<mirror::Class>(nullptr);
3220   if (!LinkClass(self, descriptor, klass, interfaces, &h_new_class)) {
3221     // Linking failed.
3222     if (!klass->IsErroneous()) {
3223       mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
3224     }
3225     return sdc.Finish(nullptr);
3226   }
3227   self->AssertNoPendingException();
3228   CHECK(h_new_class != nullptr) << descriptor;
3229   CHECK(h_new_class->IsResolved() && !h_new_class->IsErroneousResolved()) << descriptor;
3230 
3231   // Instrumentation may have updated entrypoints for all methods of all
3232   // classes. However it could not update methods of this class while we
3233   // were loading it. Now the class is resolved, we can update entrypoints
3234   // as required by instrumentation.
3235   if (Runtime::Current()->GetInstrumentation()->AreExitStubsInstalled()) {
3236     // We must be in the kRunnable state to prevent instrumentation from
3237     // suspending all threads to update entrypoints while we are doing it
3238     // for this class.
3239     DCHECK_EQ(self->GetState(), kRunnable);
3240     Runtime::Current()->GetInstrumentation()->InstallStubsForClass(h_new_class.Get());
3241   }
3242 
3243   /*
3244    * We send CLASS_PREPARE events to the debugger from here.  The
3245    * definition of "preparation" is creating the static fields for a
3246    * class and initializing them to the standard default values, but not
3247    * executing any code (that comes later, during "initialization").
3248    *
3249    * We did the static preparation in LinkClass.
3250    *
3251    * The class has been prepared and resolved but possibly not yet verified
3252    * at this point.
3253    */
3254   Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(klass, h_new_class);
3255 
3256   // Notify native debugger of the new class and its layout.
3257   jit::Jit::NewTypeLoadedIfUsingJit(h_new_class.Get());
3258 
3259   return sdc.Finish(h_new_class);
3260 }
3261 
3262 uint32_t ClassLinker::SizeOfClassWithoutEmbeddedTables(const DexFile& dex_file,
3263                                                        const dex::ClassDef& dex_class_def) {
3264   size_t num_ref = 0;
3265   size_t num_8 = 0;
3266   size_t num_16 = 0;
3267   size_t num_32 = 0;
3268   size_t num_64 = 0;
3269   ClassAccessor accessor(dex_file, dex_class_def);
3270   // We allow duplicate definitions of the same field in a class_data_item
3271   // but ignore the repeated indexes here, b/21868015.
3272   uint32_t last_field_idx = dex::kDexNoIndex;
3273   for (const ClassAccessor::Field& field : accessor.GetStaticFields()) {
3274     uint32_t field_idx = field.GetIndex();
3275     // Ordering enforced by DexFileVerifier.
3276     DCHECK(last_field_idx == dex::kDexNoIndex || last_field_idx <= field_idx);
3277     if (UNLIKELY(field_idx == last_field_idx)) {
3278       continue;
3279     }
3280     last_field_idx = field_idx;
3281     const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
3282     const char* descriptor = dex_file.GetFieldTypeDescriptor(field_id);
3283     char c = descriptor[0];
3284     switch (c) {
3285       case 'L':
3286       case '[':
3287         num_ref++;
3288         break;
3289       case 'J':
3290       case 'D':
3291         num_64++;
3292         break;
3293       case 'I':
3294       case 'F':
3295         num_32++;
3296         break;
3297       case 'S':
3298       case 'C':
3299         num_16++;
3300         break;
3301       case 'B':
3302       case 'Z':
3303         num_8++;
3304         break;
3305       default:
3306         LOG(FATAL) << "Unknown descriptor: " << c;
3307         UNREACHABLE();
3308     }
3309   }
3310   return mirror::Class::ComputeClassSize(false,
3311                                          0,
3312                                          num_8,
3313                                          num_16,
3314                                          num_32,
3315                                          num_64,
3316                                          num_ref,
3317                                          image_pointer_size_);
3318 }
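
// Worked example for the field counting above (the class is hypothetical):
// a class_data_item declaring static fields {int a; long b; Object c;} yields
// num_32 == 1, num_64 == 1, num_ref == 1 and the other counters 0, so the
// result is
//   mirror::Class::ComputeClassSize(false, 0, 0, 0, 1, 1, 1, image_pointer_size_);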
3319 
3320 // Special case to get oat code without overwriting a trampoline.
3321 const void* ClassLinker::GetQuickOatCodeFor(ArtMethod* method) {
3322   CHECK(method->IsInvokable()) << method->PrettyMethod();
3323   if (method->IsProxyMethod()) {
3324     return GetQuickProxyInvokeHandler();
3325   }
3326   const void* code = method->GetOatMethodQuickCode(GetImagePointerSize());
3327   if (code != nullptr) {
3328     return code;
3329   }
3330 
3331   jit::Jit* jit = Runtime::Current()->GetJit();
3332   if (jit != nullptr) {
3333     code = jit->GetCodeCache()->GetSavedEntryPointOfPreCompiledMethod(method);
3334     if (code != nullptr) {
3335       return code;
3336     }
3337   }
3338 
3339   if (method->IsNative()) {
3340     // No code and native? Use generic trampoline.
3341     return GetQuickGenericJniStub();
3342   }
3343 
3344   if (interpreter::CanRuntimeUseNterp() && CanMethodUseNterp(method)) {
3345     return interpreter::GetNterpEntryPoint();
3346   }
3347 
3348   return GetQuickToInterpreterBridge();
3349 }
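
// Recap of the lookup order above: proxy methods get the proxy invoke handler;
// otherwise AOT (oat) code wins if present, then JIT pre-compiled code, then
// the generic JNI stub for native methods, then nterp when both the runtime
// and the method can use it, and finally the quick-to-interpreter bridge.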
3350 
3351 bool ClassLinker::ShouldUseInterpreterEntrypoint(ArtMethod* method, const void* quick_code) {
3352   ScopedAssertNoThreadSuspension sants(__FUNCTION__);
3353   if (UNLIKELY(method->IsNative() || method->IsProxyMethod())) {
3354     return false;
3355   }
3356 
3357   if (quick_code == nullptr) {
3358     return true;
3359   }
3360 
3361   Runtime* runtime = Runtime::Current();
3362   instrumentation::Instrumentation* instr = runtime->GetInstrumentation();
3363   if (instr->InterpretOnly()) {
3364     return true;
3365   }
3366 
3367   if (runtime->GetClassLinker()->IsQuickToInterpreterBridge(quick_code)) {
3368     // Doing this check avoids doing compiled/interpreter transitions.
3369     return true;
3370   }
3371 
3372   if (Thread::Current()->IsForceInterpreter()) {
3373     // Force the use of interpreter when it is required by the debugger.
3374     return true;
3375   }
3376 
3377   if (Thread::Current()->IsAsyncExceptionPending()) {
3378     // Force use of interpreter to handle async-exceptions
3379     return true;
3380   }
3381 
3382   if (quick_code == GetQuickInstrumentationEntryPoint()) {
3383     const void* instr_target = instr->GetCodeForInvoke(method);
3384     DCHECK_NE(instr_target, GetQuickInstrumentationEntryPoint()) << method->PrettyMethod();
3385     return ShouldUseInterpreterEntrypoint(method, instr_target);
3386   }
3387 
3388   if (runtime->IsJavaDebuggable()) {
3389     // For simplicity, we ignore precompiled code and go to the interpreter
3390     // assuming we don't already have jitted code.
3391     // We could look at the oat file where `quick_code` is being defined,
3392     // and check whether it's been compiled debuggable, but we decided to
3393     // only rely on the JIT for debuggable apps.
3394     jit::Jit* jit = Runtime::Current()->GetJit();
3395     return (jit == nullptr) || !jit->GetCodeCache()->ContainsPc(quick_code);
3396   }
3397 
3398   if (runtime->IsNativeDebuggable()) {
3399     DCHECK(runtime->UseJitCompilation() && runtime->GetJit()->JitAtFirstUse());
3400     // If we are doing native debugging, ignore application's AOT code,
3401     // since we want to JIT it (at first use) with extra stackmaps for native
3402     // debugging. We keep however all AOT code from the boot image,
3403     // since the JIT-at-first-use is blocking and would result in non-negligible
3404     // startup performance impact.
3405     return !runtime->GetHeap()->IsInBootImageOatFile(quick_code);
3406   }
3407 
3408   return false;
3409 }
3410 
3411 void ClassLinker::FixupStaticTrampolines(Thread* self, ObjPtr<mirror::Class> klass) {
3412   ScopedAssertNoThreadSuspension sants(__FUNCTION__);
3413   DCHECK(klass->IsVisiblyInitialized()) << klass->PrettyDescriptor();
3414   size_t num_direct_methods = klass->NumDirectMethods();
3415   if (num_direct_methods == 0) {
3416     return;  // No direct methods => no static methods.
3417   }
3418   if (UNLIKELY(klass->IsProxyClass())) {
3419     return;
3420   }
3421   PointerSize pointer_size = image_pointer_size_;
3422   if (std::any_of(klass->GetDirectMethods(pointer_size).begin(),
3423                   klass->GetDirectMethods(pointer_size).end(),
3424                   [](const ArtMethod& m) { return m.IsCriticalNative(); })) {
3425     // Store registered @CriticalNative methods, if any, to JNI entrypoints.
3426     // Direct methods are a contiguous chunk of memory, so use the ordering of the map.
3427     ArtMethod* first_method = klass->GetDirectMethod(0u, pointer_size);
3428     ArtMethod* last_method = klass->GetDirectMethod(num_direct_methods - 1u, pointer_size);
3429     MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
3430     auto lb = critical_native_code_with_clinit_check_.lower_bound(first_method);
3431     while (lb != critical_native_code_with_clinit_check_.end() && lb->first <= last_method) {
3432       lb->first->SetEntryPointFromJni(lb->second);
3433       lb = critical_native_code_with_clinit_check_.erase(lb);
3434     }
3435   }
3436   Runtime* runtime = Runtime::Current();
3437   if (!runtime->IsStarted()) {
3438     if (runtime->IsAotCompiler() || runtime->GetHeap()->HasBootImageSpace()) {
3439       return;  // OAT file unavailable.
3440     }
3441   }
3442 
3443   const DexFile& dex_file = klass->GetDexFile();
3444   bool has_oat_class;
3445   OatFile::OatClass oat_class = OatFile::FindOatClass(dex_file,
3446                                                       klass->GetDexClassDefIndex(),
3447                                                       &has_oat_class);
3448   // Link the code of methods skipped by LinkCode.
3449   for (size_t method_index = 0; method_index < num_direct_methods; ++method_index) {
3450     ArtMethod* method = klass->GetDirectMethod(method_index, pointer_size);
3451     if (!method->IsStatic()) {
3452       // Only update static methods.
3453       continue;
3454     }
3455     const void* quick_code = nullptr;
3456 
3457     // In order:
3458     // 1) Check if we have AOT Code.
3459     // 2) Check if we have JIT Code.
3460     // 3) Check if we can use Nterp.
3461     if (has_oat_class) {
3462       OatFile::OatMethod oat_method = oat_class.GetOatMethod(method_index);
3463       quick_code = oat_method.GetQuickCode();
3464     }
3465 
3466     jit::Jit* jit = runtime->GetJit();
3467     if (quick_code == nullptr && jit != nullptr) {
3468       quick_code = jit->GetCodeCache()->GetSavedEntryPointOfPreCompiledMethod(method);
3469     }
3470 
3471     if (quick_code == nullptr &&
3472         interpreter::CanRuntimeUseNterp() &&
3473         CanMethodUseNterp(method)) {
3474       quick_code = interpreter::GetNterpEntryPoint();
3475     }
3476 
3477     // Check whether the method is native, in which case it's generic JNI.
3478     if (quick_code == nullptr && method->IsNative()) {
3479       quick_code = GetQuickGenericJniStub();
3480     } else if (ShouldUseInterpreterEntrypoint(method, quick_code)) {
3481       // Use interpreter entry point.
3482       if (IsQuickToInterpreterBridge(method->GetEntryPointFromQuickCompiledCode())) {
3483         // If we have the trampoline or the bridge already, no need to update.
3484         // This avoids dirtying boot image memory.
3485         continue;
3486       }
3487       quick_code = GetQuickToInterpreterBridge();
3488     }
3489     CHECK(quick_code != nullptr);
3490     runtime->GetInstrumentation()->UpdateMethodsCode(method, quick_code);
3491   }
3492   // Ignore virtual methods on the iterator.
3493 }
3494 
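// Illustrative sketch (not part of the runtime): the @CriticalNative handling in
// FixupStaticTrampolines() above relies on the class's direct methods occupying a contiguous
// memory range, so the registered-entrypoint map can be drained with a single lower_bound() and
// a bounded walk. The same pattern on a plain std::map, with hypothetical key/value types:
//
//   #include <map>
//
//   // Visits and removes every entry whose key lies in [first, last].
//   template <typename Key, typename Value, typename Visitor>
//   void DrainRange(std::map<Key*, Value>* map, Key* first, Key* last, Visitor&& visit) {
//     auto it = map->lower_bound(first);
//     while (it != map->end() && it->first <= last) {
//       visit(it->first, it->second);
//       it = map->erase(it);
//     }
//   }
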
3495 // Does whatever is needed to make sure that the compiler will not generate a direct invoke to
3496 // this method. Should only be called on non-invokable methods.
3497 inline void EnsureThrowsInvocationError(ClassLinker* class_linker, ArtMethod* method)
3498     REQUIRES_SHARED(Locks::mutator_lock_) {
3499   DCHECK(method != nullptr);
3500   DCHECK(!method->IsInvokable());
3501   method->SetEntryPointFromQuickCompiledCodePtrSize(
3502       class_linker->GetQuickToInterpreterBridgeTrampoline(),
3503       class_linker->GetImagePointerSize());
3504 }
3505 
3506 static void LinkCode(ClassLinker* class_linker,
3507                      ArtMethod* method,
3508                      const OatFile::OatClass* oat_class,
3509                      uint32_t class_def_method_index) REQUIRES_SHARED(Locks::mutator_lock_) {
3510   ScopedAssertNoThreadSuspension sants(__FUNCTION__);
3511   Runtime* const runtime = Runtime::Current();
3512   if (runtime->IsAotCompiler()) {
3513     // The following code only applies to a non-compiler runtime.
3514     return;
3515   }
3516 
3517   // Method shouldn't have already been linked.
3518   DCHECK(method->GetEntryPointFromQuickCompiledCode() == nullptr);
3519 
3520   if (!method->IsInvokable()) {
3521     EnsureThrowsInvocationError(class_linker, method);
3522     return;
3523   }
3524 
3525   const void* quick_code = nullptr;
3526   if (oat_class != nullptr) {
3527     // Every kind of method should at least get an invoke stub from the oat_method.
3528     // Non-abstract methods also get their code pointers.
3529     const OatFile::OatMethod oat_method = oat_class->GetOatMethod(class_def_method_index);
3530     quick_code = oat_method.GetQuickCode();
3531   }
3532 
3533   bool enter_interpreter = class_linker->ShouldUseInterpreterEntrypoint(method, quick_code);
3534 
3535   // Note: this mimics the logic in image_writer.cc that installs the resolution
3536   // stub only if we have compiled code and the method needs a class initialization
3537   // check.
3538   if (quick_code == nullptr) {
3539     method->SetEntryPointFromQuickCompiledCode(
3540         method->IsNative() ? GetQuickGenericJniStub() : GetQuickToInterpreterBridge());
3541   } else if (enter_interpreter) {
3542     method->SetEntryPointFromQuickCompiledCode(GetQuickToInterpreterBridge());
3543   } else if (NeedsClinitCheckBeforeCall(method)) {
3544     DCHECK(!method->GetDeclaringClass()->IsVisiblyInitialized());  // Actually ClassStatus::kIdx.
3545     // If we do have code but the method needs a class initialization check before calling
3546     // that code, install the resolution stub that will perform the check.
3547     // It will be replaced by the proper entry point by ClassLinker::FixupStaticTrampolines
3548     // after initializing class (see ClassLinker::InitializeClass method).
3549     method->SetEntryPointFromQuickCompiledCode(GetQuickResolutionStub());
3550   } else {
3551     method->SetEntryPointFromQuickCompiledCode(quick_code);
3552   }
3553 
3554   if (method->IsNative()) {
3555     // Set up the dlsym lookup stub. Do not go through `UnregisterNative()`
3556     // as the extra processing for @CriticalNative is not needed yet.
3557     method->SetEntryPointFromJni(
3558         method->IsCriticalNative() ? GetJniDlsymLookupCriticalStub() : GetJniDlsymLookupStub());
3559 
3560     if (enter_interpreter || quick_code == nullptr) {
3561       // We have a native method here without code. Then it should have the generic JNI
3562       // trampoline as entrypoint.
3563       // TODO: this doesn't handle all the cases where trampolines may be installed.
3564       DCHECK(class_linker->IsQuickGenericJniStub(method->GetEntryPointFromQuickCompiledCode()));
3565     }
3566   }
3567 }
3568 
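// Illustrative sketch (not part of the runtime): LinkCode() above picks the initial quick
// entrypoint from a small decision tree. Condensed into a stand-alone helper with hypothetical
// names, the selection order is:
//
//   enum class InitialEntrypoint { kGenericJniStub, kInterpreterBridge, kResolutionStub, kAotCode };
//
//   InitialEntrypoint SelectInitialEntrypoint(bool has_quick_code,
//                                             bool is_native,
//                                             bool enter_interpreter,
//                                             bool needs_clinit_check) {
//     if (!has_quick_code) {
//       return is_native ? InitialEntrypoint::kGenericJniStub : InitialEntrypoint::kInterpreterBridge;
//     }
//     if (enter_interpreter) {
//       return InitialEntrypoint::kInterpreterBridge;
//     }
//     if (needs_clinit_check) {
//       return InitialEntrypoint::kResolutionStub;  // Swapped for the real code after class init.
//     }
//     return InitialEntrypoint::kAotCode;
//   }
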
3569 void ClassLinker::SetupClass(const DexFile& dex_file,
3570                              const dex::ClassDef& dex_class_def,
3571                              Handle<mirror::Class> klass,
3572                              ObjPtr<mirror::ClassLoader> class_loader) {
3573   CHECK(klass != nullptr);
3574   CHECK(klass->GetDexCache() != nullptr);
3575   CHECK_EQ(ClassStatus::kNotReady, klass->GetStatus());
3576   const char* descriptor = dex_file.GetClassDescriptor(dex_class_def);
3577   CHECK(descriptor != nullptr);
3578 
3579   klass->SetClass(GetClassRoot<mirror::Class>(this));
3580   uint32_t access_flags = dex_class_def.GetJavaAccessFlags();
3581   CHECK_EQ(access_flags & ~kAccJavaFlagsMask, 0U);
3582   klass->SetAccessFlagsDuringLinking(access_flags);
3583   klass->SetClassLoader(class_loader);
3584   DCHECK_EQ(klass->GetPrimitiveType(), Primitive::kPrimNot);
3585   mirror::Class::SetStatus(klass, ClassStatus::kIdx, nullptr);
3586 
3587   klass->SetDexClassDefIndex(dex_file.GetIndexForClassDef(dex_class_def));
3588   klass->SetDexTypeIndex(dex_class_def.class_idx_);
3589 }
3590 
3591 LengthPrefixedArray<ArtField>* ClassLinker::AllocArtFieldArray(Thread* self,
3592                                                                LinearAlloc* allocator,
3593                                                                size_t length) {
3594   if (length == 0) {
3595     return nullptr;
3596   }
3597   // If the ArtField alignment changes, review all uses of LengthPrefixedArray<ArtField>.
3598   static_assert(alignof(ArtField) == 4, "ArtField alignment is expected to be 4.");
3599   size_t storage_size = LengthPrefixedArray<ArtField>::ComputeSize(length);
3600   void* array_storage = allocator->Alloc(self, storage_size);
3601   auto* ret = new(array_storage) LengthPrefixedArray<ArtField>(length);
3602   CHECK(ret != nullptr);
3603   std::uninitialized_fill_n(&ret->At(0), length, ArtField());
3604   return ret;
3605 }
3606 
3607 LengthPrefixedArray<ArtMethod>* ClassLinker::AllocArtMethodArray(Thread* self,
3608                                                                  LinearAlloc* allocator,
3609                                                                  size_t length) {
3610   if (length == 0) {
3611     return nullptr;
3612   }
3613   const size_t method_alignment = ArtMethod::Alignment(image_pointer_size_);
3614   const size_t method_size = ArtMethod::Size(image_pointer_size_);
3615   const size_t storage_size =
3616       LengthPrefixedArray<ArtMethod>::ComputeSize(length, method_size, method_alignment);
3617   void* array_storage = allocator->Alloc(self, storage_size);
3618   auto* ret = new (array_storage) LengthPrefixedArray<ArtMethod>(length);
3619   CHECK(ret != nullptr);
3620   for (size_t i = 0; i < length; ++i) {
3621     new(reinterpret_cast<void*>(&ret->At(i, method_size, method_alignment))) ArtMethod;
3622   }
3623   return ret;
3624 }
3625 
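// Illustrative sketch (not part of the runtime, and not the actual LengthPrefixedArray
// implementation): the two allocators above follow the common "length-prefixed array" pattern --
// one linear allocation holding a length header followed by element storage, with every element
// constructed in place via placement new. A minimal generic version, assuming the element
// alignment does not exceed the allocation's alignment, could look like this:
//
//   #include <cstddef>
//   #include <cstdint>
//   #include <new>
//
//   struct LengthHeader { uint32_t length; };
//
//   constexpr size_t ComputePrefixedSize(size_t length, size_t element_size) {
//     return sizeof(LengthHeader) + length * element_size;
//   }
//
//   template <typename T>
//   LengthHeader* ConstructLengthPrefixed(void* storage, uint32_t length) {
//     auto* header = new (storage) LengthHeader{length};
//     T* elements = reinterpret_cast<T*>(header + 1);
//     for (uint32_t i = 0; i < length; ++i) {
//       new (elements + i) T();  // Default-construct each element in place.
//     }
//     return header;
//   }
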
3626 LinearAlloc* ClassLinker::GetAllocatorForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
3627   if (class_loader == nullptr) {
3628     return Runtime::Current()->GetLinearAlloc();
3629   }
3630   LinearAlloc* allocator = class_loader->GetAllocator();
3631   DCHECK(allocator != nullptr);
3632   return allocator;
3633 }
3634 
3635 LinearAlloc* ClassLinker::GetOrCreateAllocatorForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
3636   if (class_loader == nullptr) {
3637     return Runtime::Current()->GetLinearAlloc();
3638   }
3639   WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
3640   LinearAlloc* allocator = class_loader->GetAllocator();
3641   if (allocator == nullptr) {
3642     RegisterClassLoader(class_loader);
3643     allocator = class_loader->GetAllocator();
3644     CHECK(allocator != nullptr);
3645   }
3646   return allocator;
3647 }
3648 
3649 void ClassLinker::LoadClass(Thread* self,
3650                             const DexFile& dex_file,
3651                             const dex::ClassDef& dex_class_def,
3652                             Handle<mirror::Class> klass) {
3653   ClassAccessor accessor(dex_file,
3654                          dex_class_def,
3655                          /* parse_hiddenapi_class_data= */ klass->IsBootStrapClassLoaded());
3656   if (!accessor.HasClassData()) {
3657     return;
3658   }
3659   Runtime* const runtime = Runtime::Current();
3660   {
3661     // Note: We cannot have thread suspension until the field and method arrays are set up or else
3662     // Class::VisitFieldRoots may miss some fields or methods.
3663     ScopedAssertNoThreadSuspension nts(__FUNCTION__);
3664     // Load static fields.
3665     // We allow duplicate definitions of the same field in a class_data_item
3666     // but ignore the repeated indexes here, b/21868015.
3667     LinearAlloc* const allocator = GetAllocatorForClassLoader(klass->GetClassLoader());
3668     LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self,
3669                                                                 allocator,
3670                                                                 accessor.NumStaticFields());
3671     LengthPrefixedArray<ArtField>* ifields = AllocArtFieldArray(self,
3672                                                                 allocator,
3673                                                                 accessor.NumInstanceFields());
3674     size_t num_sfields = 0u;
3675     size_t num_ifields = 0u;
3676     uint32_t last_static_field_idx = 0u;
3677     uint32_t last_instance_field_idx = 0u;
3678 
3679     // Methods
3680     bool has_oat_class = false;
3681     const OatFile::OatClass oat_class = (runtime->IsStarted() && !runtime->IsAotCompiler())
3682         ? OatFile::FindOatClass(dex_file, klass->GetDexClassDefIndex(), &has_oat_class)
3683         : OatFile::OatClass::Invalid();
3684     const OatFile::OatClass* oat_class_ptr = has_oat_class ? &oat_class : nullptr;
3685     klass->SetMethodsPtr(
3686         AllocArtMethodArray(self, allocator, accessor.NumMethods()),
3687         accessor.NumDirectMethods(),
3688         accessor.NumVirtualMethods());
3689     size_t class_def_method_index = 0;
3690     uint32_t last_dex_method_index = dex::kDexNoIndex;
3691     size_t last_class_def_method_index = 0;
3692 
3693     // Use the visitor since the range-based loops are a bit slower due to seeking. Seeking to the
3694     // methods requires decoding all of the fields.
3695     accessor.VisitFieldsAndMethods([&](
3696         const ClassAccessor::Field& field) REQUIRES_SHARED(Locks::mutator_lock_) {
3697           uint32_t field_idx = field.GetIndex();
3698           DCHECK_GE(field_idx, last_static_field_idx);  // Ordering enforced by DexFileVerifier.
3699           if (num_sfields == 0 || LIKELY(field_idx > last_static_field_idx)) {
3700             LoadField(field, klass, &sfields->At(num_sfields));
3701             ++num_sfields;
3702             last_static_field_idx = field_idx;
3703           }
3704         }, [&](const ClassAccessor::Field& field) REQUIRES_SHARED(Locks::mutator_lock_) {
3705           uint32_t field_idx = field.GetIndex();
3706           DCHECK_GE(field_idx, last_instance_field_idx);  // Ordering enforced by DexFileVerifier.
3707           if (num_ifields == 0 || LIKELY(field_idx > last_instance_field_idx)) {
3708             LoadField(field, klass, &ifields->At(num_ifields));
3709             ++num_ifields;
3710             last_instance_field_idx = field_idx;
3711           }
3712         }, [&](const ClassAccessor::Method& method) REQUIRES_SHARED(Locks::mutator_lock_) {
3713           ArtMethod* art_method = klass->GetDirectMethodUnchecked(class_def_method_index,
3714               image_pointer_size_);
3715           LoadMethod(dex_file, method, klass, art_method);
3716           LinkCode(this, art_method, oat_class_ptr, class_def_method_index);
3717           uint32_t it_method_index = method.GetIndex();
3718           if (last_dex_method_index == it_method_index) {
3719             // duplicate case
3720             art_method->SetMethodIndex(last_class_def_method_index);
3721           } else {
3722             art_method->SetMethodIndex(class_def_method_index);
3723             last_dex_method_index = it_method_index;
3724             last_class_def_method_index = class_def_method_index;
3725           }
3726           ++class_def_method_index;
3727         }, [&](const ClassAccessor::Method& method) REQUIRES_SHARED(Locks::mutator_lock_) {
3728           ArtMethod* art_method = klass->GetVirtualMethodUnchecked(
3729               class_def_method_index - accessor.NumDirectMethods(),
3730               image_pointer_size_);
3731           LoadMethod(dex_file, method, klass, art_method);
3732           LinkCode(this, art_method, oat_class_ptr, class_def_method_index);
3733           ++class_def_method_index;
3734         });
3735 
3736     if (UNLIKELY(num_ifields + num_sfields != accessor.NumFields())) {
3737       LOG(WARNING) << "Duplicate fields in class " << klass->PrettyDescriptor()
3738           << " (unique static fields: " << num_sfields << "/" << accessor.NumStaticFields()
3739           << ", unique instance fields: " << num_ifields << "/" << accessor.NumInstanceFields()
3740           << ")";
3741       // NOTE: Not shrinking the over-allocated sfields/ifields, just setting size.
3742       if (sfields != nullptr) {
3743         sfields->SetSize(num_sfields);
3744       }
3745       if (ifields != nullptr) {
3746         ifields->SetSize(num_ifields);
3747       }
3748     }
3749     // Set the field arrays.
3750     klass->SetSFieldsPtr(sfields);
3751     DCHECK_EQ(klass->NumStaticFields(), num_sfields);
3752     klass->SetIFieldsPtr(ifields);
3753     DCHECK_EQ(klass->NumInstanceFields(), num_ifields);
3754   }
3755   // Ensure that the card is marked so that remembered sets pick up native roots.
3756   WriteBarrier::ForEveryFieldWrite(klass.Get());
3757   self->AllowThreadSuspension();
3758 }
3759 
3760 void ClassLinker::LoadField(const ClassAccessor::Field& field,
3761                             Handle<mirror::Class> klass,
3762                             ArtField* dst) {
3763   const uint32_t field_idx = field.GetIndex();
3764   dst->SetDexFieldIndex(field_idx);
3765   dst->SetDeclaringClass(klass.Get());
3766 
3767   // Get access flags from the DexFile and set hiddenapi runtime access flags.
3768   dst->SetAccessFlags(field.GetAccessFlags() | hiddenapi::CreateRuntimeFlags(field));
3769 }
3770 
3771 void ClassLinker::LoadMethod(const DexFile& dex_file,
3772                              const ClassAccessor::Method& method,
3773                              Handle<mirror::Class> klass,
3774                              ArtMethod* dst) {
3775   const uint32_t dex_method_idx = method.GetIndex();
3776   const dex::MethodId& method_id = dex_file.GetMethodId(dex_method_idx);
3777   const char* method_name = dex_file.StringDataByIdx(method_id.name_idx_);
3778 
3779   ScopedAssertNoThreadSuspension ants("LoadMethod");
3780   dst->SetDexMethodIndex(dex_method_idx);
3781   dst->SetDeclaringClass(klass.Get());
3782 
3783   // Get access flags from the DexFile and set hiddenapi runtime access flags.
3784   uint32_t access_flags = method.GetAccessFlags() | hiddenapi::CreateRuntimeFlags(method);
3785 
3786   if (UNLIKELY(strcmp("finalize", method_name) == 0)) {
3787     // Set finalizable flag on declaring class.
3788     if (strcmp("V", dex_file.GetShorty(method_id.proto_idx_)) == 0) {
3789       // Void return type.
3790       if (klass->GetClassLoader() != nullptr) {  // All non-boot finalizer methods are flagged.
3791         klass->SetFinalizable();
3792       } else {
3793         std::string temp;
3794         const char* klass_descriptor = klass->GetDescriptor(&temp);
3795         // The Enum class declares a "final" finalize() method to prevent subclasses from
3796         // introducing a finalizer. We don't want to set the finalizable flag for Enum or its
3797         // subclasses, so we exclude it here.
3798         // We also want to avoid setting the flag on Object, where we know that finalize() is
3799         // empty.
3800         if (strcmp(klass_descriptor, "Ljava/lang/Object;") != 0 &&
3801             strcmp(klass_descriptor, "Ljava/lang/Enum;") != 0) {
3802           klass->SetFinalizable();
3803         }
3804       }
3805     }
3806   } else if (method_name[0] == '<') {
3807     // Fix broken access flags for initializers. Bug 11157540.
3808     bool is_init = (strcmp("<init>", method_name) == 0);
3809     bool is_clinit = !is_init && (strcmp("<clinit>", method_name) == 0);
3810     if (UNLIKELY(!is_init && !is_clinit)) {
3811       LOG(WARNING) << "Unexpected '<' at start of method name " << method_name;
3812     } else {
3813       if (UNLIKELY((access_flags & kAccConstructor) == 0)) {
3814         LOG(WARNING) << method_name << " didn't have expected constructor access flag in class "
3815             << klass->PrettyDescriptor() << " in dex file " << dex_file.GetLocation();
3816         access_flags |= kAccConstructor;
3817       }
3818     }
3819   }
3820   if (UNLIKELY((access_flags & kAccNative) != 0u)) {
3821     // Check if the native method is annotated with @FastNative or @CriticalNative.
3822     access_flags |= annotations::GetNativeMethodAnnotationAccessFlags(
3823         dex_file, dst->GetClassDef(), dex_method_idx);
3824   }
3825   dst->SetAccessFlags(access_flags);
3826   // Must be done after SetAccessFlags since IsAbstract depends on it.
3827   if (klass->IsInterface() && dst->IsAbstract()) {
3828     dst->CalculateAndSetImtIndex();
3829   }
3830   if (dst->HasCodeItem()) {
3831     DCHECK_NE(method.GetCodeItemOffset(), 0u);
3832     if (Runtime::Current()->IsAotCompiler()) {
3833       dst->SetDataPtrSize(reinterpret_cast32<void*>(method.GetCodeItemOffset()), image_pointer_size_);
3834     } else {
3835       dst->SetCodeItem(dst->GetDexFile()->GetCodeItem(method.GetCodeItemOffset()));
3836     }
3837   } else {
3838     dst->SetDataPtrSize(nullptr, image_pointer_size_);
3839     DCHECK_EQ(method.GetCodeItemOffset(), 0u);
3840   }
3841 
3842   // Set optimization flags related to the shorty.
3843   const char* shorty = dst->GetShorty();
3844   bool all_parameters_are_reference = true;
3845   bool all_parameters_are_reference_or_int = true;
3846   bool return_type_is_fp = (shorty[0] == 'F' || shorty[0] == 'D');
3847 
3848   for (size_t i = 1, e = strlen(shorty); i < e; ++i) {
3849     if (shorty[i] != 'L') {
3850       all_parameters_are_reference = false;
3851       if (shorty[i] == 'F' || shorty[i] == 'D' || shorty[i] == 'J') {
3852         all_parameters_are_reference_or_int = false;
3853         break;
3854       }
3855     }
3856   }
3857 
3858   if (!dst->IsNative() && all_parameters_are_reference) {
3859     dst->SetNterpEntryPointFastPathFlag();
3860   }
3861 
3862   if (!return_type_is_fp && all_parameters_are_reference_or_int) {
3863     dst->SetNterpInvokeFastPathFlag();
3864   }
3865 }
3866 
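// Illustrative sketch (not part of the runtime): the flag computation at the end of LoadMethod()
// above scans the method shorty, where index 0 is the return type and indices 1..n are the
// parameter types ('L' = reference, 'J'/'D' = 64-bit, 'F'/'D' = floating point). A stand-alone
// version of that scan, using hypothetical names, could look like this:
//
//   #include <cstring>
//
//   struct ShortyTraits {
//     bool all_parameters_are_reference = true;
//     bool all_parameters_are_reference_or_int = true;
//     bool return_type_is_fp = false;
//   };
//
//   ShortyTraits AnalyzeShorty(const char* shorty) {
//     ShortyTraits traits;
//     traits.return_type_is_fp = (shorty[0] == 'F' || shorty[0] == 'D');
//     for (size_t i = 1, e = strlen(shorty); i < e; ++i) {
//       if (shorty[i] != 'L') {
//         traits.all_parameters_are_reference = false;
//         if (shorty[i] == 'F' || shorty[i] == 'D' || shorty[i] == 'J') {
//           traits.all_parameters_are_reference_or_int = false;
//           break;
//         }
//       }
//     }
//     return traits;
//   }
//
//   // Example: AnalyzeShorty("VLL") reports all-reference parameters and a non-FP return type.
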
3867 void ClassLinker::AppendToBootClassPath(Thread* self, const DexFile* dex_file) {
3868   ObjPtr<mirror::DexCache> dex_cache = AllocAndInitializeDexCache(
3869       self,
3870       *dex_file,
3871       Runtime::Current()->GetLinearAlloc());
3872   CHECK(dex_cache != nullptr) << "Failed to allocate dex cache for " << dex_file->GetLocation();
3873   AppendToBootClassPath(dex_file, dex_cache);
3874 }
3875 
3876 void ClassLinker::AppendToBootClassPath(const DexFile* dex_file,
3877                                         ObjPtr<mirror::DexCache> dex_cache) {
3878   CHECK(dex_file != nullptr);
3879   CHECK(dex_cache != nullptr) << dex_file->GetLocation();
3880   boot_class_path_.push_back(dex_file);
3881   WriterMutexLock mu(Thread::Current(), *Locks::dex_lock_);
3882   RegisterDexFileLocked(*dex_file, dex_cache, /* class_loader= */ nullptr);
3883 }
3884 
3885 void ClassLinker::RegisterDexFileLocked(const DexFile& dex_file,
3886                                         ObjPtr<mirror::DexCache> dex_cache,
3887                                         ObjPtr<mirror::ClassLoader> class_loader) {
3888   Thread* const self = Thread::Current();
3889   Locks::dex_lock_->AssertExclusiveHeld(self);
3890   CHECK(dex_cache != nullptr) << dex_file.GetLocation();
3891   CHECK_EQ(dex_cache->GetDexFile(), &dex_file) << dex_file.GetLocation();
3892   // For app images, the dex cache location may be a suffix of the dex file location since the
3893   // dex file location is an absolute path.
3894   const std::string dex_cache_location = dex_cache->GetLocation()->ToModifiedUtf8();
3895   const size_t dex_cache_length = dex_cache_location.length();
3896   CHECK_GT(dex_cache_length, 0u) << dex_file.GetLocation();
3897   std::string dex_file_location = dex_file.GetLocation();
3898   // The following path checks don't work on preopt when using boot dex files, where the dex
3899   // cache location is the one on device, and the dex_file's location is the one on host.
3900   if (!(Runtime::Current()->IsAotCompiler() && class_loader == nullptr && !kIsTargetBuild)) {
3901     CHECK_GE(dex_file_location.length(), dex_cache_length)
3902         << dex_cache_location << " " << dex_file.GetLocation();
3903     const std::string dex_file_suffix = dex_file_location.substr(
3904         dex_file_location.length() - dex_cache_length,
3905         dex_cache_length);
3906     // Example dex_cache location is SettingsProvider.apk and
3907     // dex file location is /system/priv-app/SettingsProvider/SettingsProvider.apk
3908     CHECK_EQ(dex_cache_location, dex_file_suffix);
3909   }
3910   const OatFile* oat_file =
3911       (dex_file.GetOatDexFile() != nullptr) ? dex_file.GetOatDexFile()->GetOatFile() : nullptr;
3912   // Clean up pass to remove null dex caches; null dex caches can occur due to class unloading
3913   // and we are lazily removing null entries. Also check if we need to initialize OatFile data
3914   // (.data.bimg.rel.ro and .bss sections) needed for code execution.
3915   bool initialize_oat_file_data = (oat_file != nullptr) && oat_file->IsExecutable();
3916   JavaVMExt* const vm = self->GetJniEnv()->GetVm();
3917   for (auto it = dex_caches_.begin(); it != dex_caches_.end(); ) {
3918     DexCacheData data = *it;
3919     if (self->IsJWeakCleared(data.weak_root)) {
3920       vm->DeleteWeakGlobalRef(self, data.weak_root);
3921       it = dex_caches_.erase(it);
3922     } else {
3923       if (initialize_oat_file_data &&
3924           it->dex_file->GetOatDexFile() != nullptr &&
3925           it->dex_file->GetOatDexFile()->GetOatFile() == oat_file) {
3926         initialize_oat_file_data = false;  // Already initialized.
3927       }
3928       ++it;
3929     }
3930   }
3931   if (initialize_oat_file_data) {
3932     oat_file->InitializeRelocations();
3933   }
3934   // Let hiddenapi assign a domain to the newly registered dex file.
3935   hiddenapi::InitializeDexFileDomain(dex_file, class_loader);
3936 
3937   jweak dex_cache_jweak = vm->AddWeakGlobalRef(self, dex_cache);
3938   DexCacheData data;
3939   data.weak_root = dex_cache_jweak;
3940   data.dex_file = dex_cache->GetDexFile();
3941   data.class_table = ClassTableForClassLoader(class_loader);
3942   AddNativeDebugInfoForDex(self, data.dex_file);
3943   DCHECK(data.class_table != nullptr);
3944   // Make sure to hold the dex cache live in the class table. This case happens for the boot class
3945   // path dex caches without an image.
3946   data.class_table->InsertStrongRoot(dex_cache);
3947   // Make sure that the dex cache holds the classloader live.
3948   dex_cache->SetClassLoader(class_loader);
3949   if (class_loader != nullptr) {
3950     // Since we added a strong root to the class table, do the write barrier as required for
3951     // remembered sets and generational GCs.
3952     WriteBarrier::ForEveryFieldWrite(class_loader);
3953   }
3954   dex_caches_.push_back(data);
3955 }
3956 
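// Illustrative sketch (not part of the runtime): RegisterDexFileLocked() above checks that the
// dex cache location (e.g. "SettingsProvider.apk") is a suffix of the absolute dex file location
// (e.g. "/system/priv-app/SettingsProvider/SettingsProvider.apk"). That check reduces to a plain
// string-suffix test:
//
//   #include <string>
//
//   bool IsLocationSuffix(const std::string& dex_cache_location,
//                         const std::string& dex_file_location) {
//     return dex_file_location.length() >= dex_cache_location.length() &&
//            dex_file_location.compare(dex_file_location.length() - dex_cache_location.length(),
//                                      dex_cache_location.length(),
//                                      dex_cache_location) == 0;
//   }
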
3957 ObjPtr<mirror::DexCache> ClassLinker::DecodeDexCacheLocked(Thread* self, const DexCacheData* data) {
3958   return data != nullptr
3959       ? ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data->weak_root))
3960       : nullptr;
3961 }
3962 
3963 bool ClassLinker::IsSameClassLoader(
3964     ObjPtr<mirror::DexCache> dex_cache,
3965     const DexCacheData* data,
3966     ObjPtr<mirror::ClassLoader> class_loader) {
3967   CHECK(data != nullptr);
3968   DCHECK_EQ(dex_cache->GetDexFile(), data->dex_file);
3969   return data->class_table == ClassTableForClassLoader(class_loader);
3970 }
3971 
3972 void ClassLinker::RegisterExistingDexCache(ObjPtr<mirror::DexCache> dex_cache,
3973                                            ObjPtr<mirror::ClassLoader> class_loader) {
3974   SCOPED_TRACE << __FUNCTION__ << " " << dex_cache->GetDexFile()->GetLocation();
3975   Thread* self = Thread::Current();
3976   StackHandleScope<2> hs(self);
3977   Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(dex_cache));
3978   Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(class_loader));
3979   const DexFile* dex_file = dex_cache->GetDexFile();
3980   DCHECK(dex_file != nullptr) << "Attempt to register uninitialized dex_cache object!";
3981   if (kIsDebugBuild) {
3982     ReaderMutexLock mu(self, *Locks::dex_lock_);
3983     const DexCacheData* old_data = FindDexCacheDataLocked(*dex_file);
3984     ObjPtr<mirror::DexCache> old_dex_cache = DecodeDexCacheLocked(self, old_data);
3985     DCHECK(old_dex_cache.IsNull()) << "Attempt to manually register a dex cache that's already "
3986                                    << "been registered on dex file " << dex_file->GetLocation();
3987   }
3988   ClassTable* table;
3989   {
3990     WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
3991     table = InsertClassTableForClassLoader(h_class_loader.Get());
3992   }
3993   // Avoid a deadlock between a garbage collecting thread running a checkpoint,
3994   // a thread holding the dex lock and blocking on a condition variable regarding
3995   // weak references access, and a thread blocking on the dex lock.
3996   gc::ScopedGCCriticalSection gcs(self, gc::kGcCauseClassLinker, gc::kCollectorTypeClassLinker);
3997   WriterMutexLock mu(self, *Locks::dex_lock_);
3998   RegisterDexFileLocked(*dex_file, h_dex_cache.Get(), h_class_loader.Get());
3999   table->InsertStrongRoot(h_dex_cache.Get());
4000   if (h_class_loader.Get() != nullptr) {
4001     // Since we added a strong root to the class table, do the write barrier as required for
4002     // remembered sets and generational GCs.
4003     WriteBarrier::ForEveryFieldWrite(h_class_loader.Get());
4004   }
4005 }
4006 
4007 static void ThrowDexFileAlreadyRegisteredError(Thread* self, const DexFile& dex_file)
4008     REQUIRES_SHARED(Locks::mutator_lock_) {
4009   self->ThrowNewExceptionF("Ljava/lang/InternalError;",
4010                            "Attempt to register dex file %s with multiple class loaders",
4011                            dex_file.GetLocation().c_str());
4012 }
4013 
4014 ObjPtr<mirror::DexCache> ClassLinker::RegisterDexFile(const DexFile& dex_file,
4015                                                       ObjPtr<mirror::ClassLoader> class_loader) {
4016   Thread* self = Thread::Current();
4017   ObjPtr<mirror::DexCache> old_dex_cache;
4018   bool registered_with_another_class_loader = false;
4019   {
4020     ReaderMutexLock mu(self, *Locks::dex_lock_);
4021     const DexCacheData* old_data = FindDexCacheDataLocked(dex_file);
4022     old_dex_cache = DecodeDexCacheLocked(self, old_data);
4023     if (old_dex_cache != nullptr) {
4024       if (IsSameClassLoader(old_dex_cache, old_data, class_loader)) {
4025         return old_dex_cache;
4026       } else {
4027         // TODO: This is not very clean. We should find a way to request that exceptions be thrown
4028         // when it is safe to do so, in order to simplify this.
4029         registered_with_another_class_loader = true;
4030       }
4031     }
4032   }
4033   // We need to have released the dex_lock_ to allocate safely.
4034   if (registered_with_another_class_loader) {
4035     ThrowDexFileAlreadyRegisteredError(self, dex_file);
4036     return nullptr;
4037   }
4038   SCOPED_TRACE << __FUNCTION__ << " " << dex_file.GetLocation();
4039   LinearAlloc* const linear_alloc = GetOrCreateAllocatorForClassLoader(class_loader);
4040   DCHECK(linear_alloc != nullptr);
4041   ClassTable* table;
4042   {
4043     WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
4044     table = InsertClassTableForClassLoader(class_loader);
4045   }
4046   // Don't alloc while holding the lock, since allocation may need to
4047   // suspend all threads and another thread may need the dex_lock_ to
4048   // get to a suspend point.
4049   StackHandleScope<3> hs(self);
4050   Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(class_loader));
4051   Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(AllocDexCache(self, dex_file)));
4052   {
4053     // Avoid a deadlock between a garbage collecting thread running a checkpoint,
4054     // a thread holding the dex lock and blocking on a condition variable regarding
4055     // weak references access, and a thread blocking on the dex lock.
4056     gc::ScopedGCCriticalSection gcs(self, gc::kGcCauseClassLinker, gc::kCollectorTypeClassLinker);
4057     WriterMutexLock mu(self, *Locks::dex_lock_);
4058     const DexCacheData* old_data = FindDexCacheDataLocked(dex_file);
4059     old_dex_cache = DecodeDexCacheLocked(self, old_data);
4060     if (old_dex_cache == nullptr && h_dex_cache != nullptr) {
4061       // Do InitializeNativeFields while holding dex lock to make sure two threads don't call it
4062       // at the same time with the same dex cache. Since the .bss is shared, this could otherwise
4063       // cause a failing DCHECK that the arrays are null.
4064       h_dex_cache->InitializeNativeFields(&dex_file, linear_alloc);
4065       RegisterDexFileLocked(dex_file, h_dex_cache.Get(), h_class_loader.Get());
4066     }
4067     if (old_dex_cache != nullptr) {
4068       // Another thread managed to initialize the dex cache faster, so use that DexCache.
4069       // If this thread encountered OOME, ignore it.
4070       DCHECK_EQ(h_dex_cache == nullptr, self->IsExceptionPending());
4071       self->ClearException();
4072       // We cannot call EnsureSameClassLoader() or allocate an exception while holding the
4073       // dex_lock_.
4074       if (IsSameClassLoader(old_dex_cache, old_data, h_class_loader.Get())) {
4075         return old_dex_cache;
4076       } else {
4077         registered_with_another_class_loader = true;
4078       }
4079     }
4080   }
4081   if (registered_with_another_class_loader) {
4082     ThrowDexFileAlreadyRegisteredError(self, dex_file);
4083     return nullptr;
4084   }
4085   if (h_dex_cache == nullptr) {
4086     self->AssertPendingOOMException();
4087     return nullptr;
4088   }
4089   table->InsertStrongRoot(h_dex_cache.Get());
4090   if (h_class_loader.Get() != nullptr) {
4091     // Since we added a strong root to the class table, do the write barrier as required for
4092     // remembered sets and generational GCs.
4093     WriteBarrier::ForEveryFieldWrite(h_class_loader.Get());
4094   }
4095   VLOG(class_linker) << "Registered dex file " << dex_file.GetLocation();
4096   PaletteNotifyDexFileLoaded(dex_file.GetLocation().c_str());
4097   return h_dex_cache.Get();
4098 }
4099 
4100 bool ClassLinker::IsDexFileRegistered(Thread* self, const DexFile& dex_file) {
4101   ReaderMutexLock mu(self, *Locks::dex_lock_);
4102   return DecodeDexCacheLocked(self, FindDexCacheDataLocked(dex_file)) != nullptr;
4103 }
4104 
4105 ObjPtr<mirror::DexCache> ClassLinker::FindDexCache(Thread* self, const DexFile& dex_file) {
4106   ReaderMutexLock mu(self, *Locks::dex_lock_);
4107   const DexCacheData* dex_cache_data = FindDexCacheDataLocked(dex_file);
4108   ObjPtr<mirror::DexCache> dex_cache = DecodeDexCacheLocked(self, dex_cache_data);
4109   if (dex_cache != nullptr) {
4110     return dex_cache;
4111   }
4112   // Failure, dump diagnostic and abort.
4113   for (const DexCacheData& data : dex_caches_) {
4114     if (DecodeDexCacheLocked(self, &data) != nullptr) {
4115       LOG(FATAL_WITHOUT_ABORT) << "Registered dex file " << data.dex_file->GetLocation();
4116     }
4117   }
4118   LOG(FATAL) << "Failed to find DexCache for DexFile " << dex_file.GetLocation()
4119              << " " << &dex_file << " " << dex_cache_data->dex_file;
4120   UNREACHABLE();
4121 }
4122 
4123 ClassTable* ClassLinker::FindClassTable(Thread* self, ObjPtr<mirror::DexCache> dex_cache) {
4124   const DexFile* dex_file = dex_cache->GetDexFile();
4125   DCHECK(dex_file != nullptr);
4126   ReaderMutexLock mu(self, *Locks::dex_lock_);
4127   // Search assuming uniqueness of the dex file.
4128   for (const DexCacheData& data : dex_caches_) {
4129     // Avoid decoding (and read barriers) other unrelated dex caches.
4130     if (data.dex_file == dex_file) {
4131       ObjPtr<mirror::DexCache> registered_dex_cache = DecodeDexCacheLocked(self, &data);
4132       if (registered_dex_cache != nullptr) {
4133         CHECK_EQ(registered_dex_cache, dex_cache) << dex_file->GetLocation();
4134         return data.class_table;
4135       }
4136     }
4137   }
4138   return nullptr;
4139 }
4140 
4141 const ClassLinker::DexCacheData* ClassLinker::FindDexCacheDataLocked(const DexFile& dex_file) {
4142   // Search assuming uniqueness of the dex file.
4143   for (const DexCacheData& data : dex_caches_) {
4144     // Avoid decoding (and read barriers) other unrelated dex caches.
4145     if (data.dex_file == &dex_file) {
4146       return &data;
4147     }
4148   }
4149   return nullptr;
4150 }
4151 
4152 void ClassLinker::CreatePrimitiveClass(Thread* self,
4153                                        Primitive::Type type,
4154                                        ClassRoot primitive_root) {
4155   ObjPtr<mirror::Class> primitive_class =
4156       AllocClass(self, mirror::Class::PrimitiveClassSize(image_pointer_size_));
4157   CHECK(primitive_class != nullptr) << "OOM for primitive class " << type;
4158   // Do not hold lock on the primitive class object, the initialization of
4159   // primitive classes is done while the process is still single threaded.
4160   primitive_class->SetAccessFlagsDuringLinking(
4161       kAccPublic | kAccFinal | kAccAbstract | kAccVerificationAttempted);
4162   primitive_class->SetPrimitiveType(type);
4163   primitive_class->SetIfTable(GetClassRoot<mirror::Object>(this)->GetIfTable());
4164   // Skip EnsureSkipAccessChecksMethods(). We can skip the verified status,
4165   // the kAccVerificationAttempted flag was added above, and there are no
4166   // methods that need the kAccSkipAccessChecks flag.
4167   DCHECK_EQ(primitive_class->NumMethods(), 0u);
4168   // Primitive classes are initialized during single threaded startup, so visibly initialized.
4169   primitive_class->SetStatusForPrimitiveOrArray(ClassStatus::kVisiblyInitialized);
4170   const char* descriptor = Primitive::Descriptor(type);
4171   ObjPtr<mirror::Class> existing = InsertClass(descriptor,
4172                                                primitive_class,
4173                                                ComputeModifiedUtf8Hash(descriptor));
4174   CHECK(existing == nullptr) << "InitPrimitiveClass(" << type << ") failed";
4175   SetClassRoot(primitive_root, primitive_class);
4176 }
4177 
4178 inline ObjPtr<mirror::IfTable> ClassLinker::GetArrayIfTable() {
4179   return GetClassRoot<mirror::ObjectArray<mirror::Object>>(this)->GetIfTable();
4180 }
4181 
4182 // Create an array class (i.e. the class object for the array, not the
4183 // array itself).  "descriptor" looks like "[C" or "[[[[B" or
4184 // "[Ljava/lang/String;".
4185 //
4186 // If "descriptor" refers to an array of primitives, look up the
4187 // primitive type's internally-generated class object.
4188 //
4189 // "class_loader" is the class loader of the class that's referring to
4190 // us.  It's used to ensure that we're looking for the element type in
4191 // the right context.  It does NOT become the class loader for the
4192 // array class; that always comes from the base element class.
4193 //
4194 // Returns null with an exception raised on failure.
4195 ObjPtr<mirror::Class> ClassLinker::CreateArrayClass(Thread* self,
4196                                                     const char* descriptor,
4197                                                     size_t hash,
4198                                                     Handle<mirror::ClassLoader> class_loader) {
4199   // Identify the underlying component type
4200   CHECK_EQ('[', descriptor[0]);
4201   StackHandleScope<2> hs(self);
4202 
4203   // This is to prevent the calls to ClassLoad and ClassPrepare, which can cause java/user-supplied
4204   // code to be executed. We put it up here so we can avoid all the allocations associated with
4205   // creating the class. This can happen with (e.g.) JIT threads.
4206   if (!self->CanLoadClasses()) {
4207     // Make sure we don't try to load anything, potentially causing an infinite loop.
4208     ObjPtr<mirror::Throwable> pre_allocated =
4209         Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
4210     self->SetException(pre_allocated);
4211     return nullptr;
4212   }
4213 
4214   MutableHandle<mirror::Class> component_type(hs.NewHandle(FindClass(self, descriptor + 1,
4215                                                                      class_loader)));
4216   if (component_type == nullptr) {
4217     DCHECK(self->IsExceptionPending());
4218     // We need to accept erroneous classes as component types.
4219     const size_t component_hash = ComputeModifiedUtf8Hash(descriptor + 1);
4220     component_type.Assign(LookupClass(self, descriptor + 1, component_hash, class_loader.Get()));
4221     if (component_type == nullptr) {
4222       DCHECK(self->IsExceptionPending());
4223       return nullptr;
4224     } else {
4225       self->ClearException();
4226     }
4227   }
4228   if (UNLIKELY(component_type->IsPrimitiveVoid())) {
4229     ThrowNoClassDefFoundError("Attempt to create array of void primitive type");
4230     return nullptr;
4231   }
4232   // See if the component type is already loaded.  Array classes are
4233   // always associated with the class loader of their underlying
4234   // element type -- an array of Strings goes with the loader for
4235   // java/lang/String -- so we need to look for it there.  (The
4236   // caller should have checked for the existence of the class
4237   // before calling here, but they did so with *their* class loader,
4238   // not the component type's loader.)
4239   //
4240   // If we find it, the caller adds "loader" to the class' initiating
4241   // loader list, which should prevent us from going through this again.
4242   //
4243   // This call is unnecessary if "loader" and "component_type->GetClassLoader()"
4244   // are the same, because our caller (FindClass) just did the
4245   // lookup.  (Even if we get this wrong we still have correct behavior,
4246   // because we effectively do this lookup again when we add the new
4247   // class to the hash table --- necessary because of possible races with
4248   // other threads.)
4249   if (class_loader.Get() != component_type->GetClassLoader()) {
4250     ObjPtr<mirror::Class> new_class =
4251         LookupClass(self, descriptor, hash, component_type->GetClassLoader());
4252     if (new_class != nullptr) {
4253       return new_class;
4254     }
4255   }
4256   // Core array classes, i.e. Object[], Class[], String[] and primitive
4257   // arrays, have special initialization and they should be found above.
4258   DCHECK(!component_type->IsObjectClass() ||
4259          // Guard from false positives for errors before setting superclass.
4260          component_type->IsErroneousUnresolved());
4261   DCHECK(!component_type->IsStringClass());
4262   DCHECK(!component_type->IsClassClass());
4263   DCHECK(!component_type->IsPrimitive());
4264 
4265   // Fill out the fields in the Class.
4266   //
4267   // It is possible to execute some methods against arrays, because
4268   // all arrays are subclasses of java_lang_Object_, so we need to set
4269   // up a vtable.  We can just point at the one in java_lang_Object_.
4270   //
4271   // Array classes are simple enough that we don't need to do a full
4272   // link step.
4273   size_t array_class_size = mirror::Array::ClassSize(image_pointer_size_);
4274   auto visitor = [this, array_class_size, component_type](ObjPtr<mirror::Object> obj,
4275                                                           size_t usable_size)
4276       REQUIRES_SHARED(Locks::mutator_lock_) {
4277     ScopedAssertNoNewTransactionRecords sanntr("CreateArrayClass");
4278     mirror::Class::InitializeClassVisitor init_class(array_class_size);
4279     init_class(obj, usable_size);
4280     ObjPtr<mirror::Class> klass = ObjPtr<mirror::Class>::DownCast(obj);
4281     klass->SetComponentType(component_type.Get());
4282     // Do not hold lock for initialization, the fence issued after the visitor
4283     // returns ensures memory visibility together with the implicit consume
4284     // semantics (for all supported architectures) for any thread that loads
4285     // the array class reference from any memory locations afterwards.
4286     FinishArrayClassSetup(klass);
4287   };
4288   auto new_class = hs.NewHandle<mirror::Class>(
4289       AllocClass(self, GetClassRoot<mirror::Class>(this), array_class_size, visitor));
4290   if (new_class == nullptr) {
4291     self->AssertPendingOOMException();
4292     return nullptr;
4293   }
4294 
4295   ObjPtr<mirror::Class> existing = InsertClass(descriptor, new_class.Get(), hash);
4296   if (existing == nullptr) {
4297     // We postpone ClassLoad and ClassPrepare events to this point in time to avoid
4298     // duplicate events in case of races. Array classes don't really follow dedicated
4299     // load and prepare, anyway.
4300     Runtime::Current()->GetRuntimeCallbacks()->ClassLoad(new_class);
4301     Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(new_class, new_class);
4302 
4303     jit::Jit::NewTypeLoadedIfUsingJit(new_class.Get());
4304     return new_class.Get();
4305   }
4306   // Another thread must have loaded the class after we
4307   // started but before we finished.  Abandon what we've
4308   // done.
4309   //
4310   // (Yes, this happens.)
4311 
4312   return existing;
4313 }
4314 
4315 ObjPtr<mirror::Class> ClassLinker::LookupPrimitiveClass(char type) {
4316   ClassRoot class_root;
4317   switch (type) {
4318     case 'B': class_root = ClassRoot::kPrimitiveByte; break;
4319     case 'C': class_root = ClassRoot::kPrimitiveChar; break;
4320     case 'D': class_root = ClassRoot::kPrimitiveDouble; break;
4321     case 'F': class_root = ClassRoot::kPrimitiveFloat; break;
4322     case 'I': class_root = ClassRoot::kPrimitiveInt; break;
4323     case 'J': class_root = ClassRoot::kPrimitiveLong; break;
4324     case 'S': class_root = ClassRoot::kPrimitiveShort; break;
4325     case 'Z': class_root = ClassRoot::kPrimitiveBoolean; break;
4326     case 'V': class_root = ClassRoot::kPrimitiveVoid; break;
4327     default:
4328       return nullptr;
4329   }
4330   return GetClassRoot(class_root, this);
4331 }
4332 
4333 ObjPtr<mirror::Class> ClassLinker::FindPrimitiveClass(char type) {
4334   ObjPtr<mirror::Class> result = LookupPrimitiveClass(type);
4335   if (UNLIKELY(result == nullptr)) {
4336     std::string printable_type(PrintableChar(type));
4337     ThrowNoClassDefFoundError("Not a primitive type: %s", printable_type.c_str());
4338   }
4339   return result;
4340 }
4341 
4342 ObjPtr<mirror::Class> ClassLinker::InsertClass(const char* descriptor,
4343                                                ObjPtr<mirror::Class> klass,
4344                                                size_t hash) {
4345   DCHECK(Thread::Current()->CanLoadClasses());
4346   if (VLOG_IS_ON(class_linker)) {
4347     ObjPtr<mirror::DexCache> dex_cache = klass->GetDexCache();
4348     std::string source;
4349     if (dex_cache != nullptr) {
4350       source += " from ";
4351       source += dex_cache->GetLocation()->ToModifiedUtf8();
4352     }
4353     LOG(INFO) << "Loaded class " << descriptor << source;
4354   }
4355   {
4356     WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
4357     const ObjPtr<mirror::ClassLoader> class_loader = klass->GetClassLoader();
4358     ClassTable* const class_table = InsertClassTableForClassLoader(class_loader);
4359     ObjPtr<mirror::Class> existing = class_table->Lookup(descriptor, hash);
4360     if (existing != nullptr) {
4361       return existing;
4362     }
4363     VerifyObject(klass);
4364     class_table->InsertWithHash(klass, hash);
4365     if (class_loader != nullptr) {
4366       // This is necessary because we need to have the card dirtied for remembered sets.
4367       WriteBarrier::ForEveryFieldWrite(class_loader);
4368     }
4369     if (log_new_roots_) {
4370       new_class_roots_.push_back(GcRoot<mirror::Class>(klass));
4371     }
4372   }
4373   if (kIsDebugBuild) {
4374     // Test that copied methods correctly can find their holder.
4375     for (ArtMethod& method : klass->GetCopiedMethods(image_pointer_size_)) {
4376       CHECK_EQ(GetHoldingClassOfCopiedMethod(&method), klass);
4377     }
4378   }
4379   return nullptr;
4380 }
4381 
4382 void ClassLinker::WriteBarrierForBootOatFileBssRoots(const OatFile* oat_file) {
4383   WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
4384   DCHECK(!oat_file->GetBssGcRoots().empty()) << oat_file->GetLocation();
4385   if (log_new_roots_ && !ContainsElement(new_bss_roots_boot_oat_files_, oat_file)) {
4386     new_bss_roots_boot_oat_files_.push_back(oat_file);
4387   }
4388 }
4389 
4390 // TODO This should really be in mirror::Class.
4391 void ClassLinker::UpdateClassMethods(ObjPtr<mirror::Class> klass,
4392                                      LengthPrefixedArray<ArtMethod>* new_methods) {
4393   klass->SetMethodsPtrUnchecked(new_methods,
4394                                 klass->NumDirectMethods(),
4395                                 klass->NumDeclaredVirtualMethods());
4396   // Need to mark the card so that the remembered sets and mod union tables get updated.
4397   WriteBarrier::ForEveryFieldWrite(klass);
4398 }
4399 
4400 ObjPtr<mirror::Class> ClassLinker::LookupClass(Thread* self,
4401                                                const char* descriptor,
4402                                                ObjPtr<mirror::ClassLoader> class_loader) {
4403   return LookupClass(self, descriptor, ComputeModifiedUtf8Hash(descriptor), class_loader);
4404 }
4405 
4406 ObjPtr<mirror::Class> ClassLinker::LookupClass(Thread* self,
4407                                                const char* descriptor,
4408                                                size_t hash,
4409                                                ObjPtr<mirror::ClassLoader> class_loader) {
4410   ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
4411   ClassTable* const class_table = ClassTableForClassLoader(class_loader);
4412   if (class_table != nullptr) {
4413     ObjPtr<mirror::Class> result = class_table->Lookup(descriptor, hash);
4414     if (result != nullptr) {
4415       return result;
4416     }
4417   }
4418   return nullptr;
4419 }
4420 
4421 class MoveClassTableToPreZygoteVisitor : public ClassLoaderVisitor {
4422  public:
4423   MoveClassTableToPreZygoteVisitor() {}
4424 
4425   void Visit(ObjPtr<mirror::ClassLoader> class_loader)
4426       REQUIRES(Locks::classlinker_classes_lock_)
4427       REQUIRES_SHARED(Locks::mutator_lock_) override {
4428     ClassTable* const class_table = class_loader->GetClassTable();
4429     if (class_table != nullptr) {
4430       class_table->FreezeSnapshot();
4431     }
4432   }
4433 };
4434 
4435 void ClassLinker::MoveClassTableToPreZygote() {
4436   WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
4437   boot_class_table_->FreezeSnapshot();
4438   MoveClassTableToPreZygoteVisitor visitor;
4439   VisitClassLoaders(&visitor);
4440 }
4441 
4442 // Look up classes by hash and descriptor and put all matching ones in the result array.
4443 class LookupClassesVisitor : public ClassLoaderVisitor {
4444  public:
4445   LookupClassesVisitor(const char* descriptor,
4446                        size_t hash,
4447                        std::vector<ObjPtr<mirror::Class>>* result)
4448      : descriptor_(descriptor),
4449        hash_(hash),
4450        result_(result) {}
4451 
4452   void Visit(ObjPtr<mirror::ClassLoader> class_loader)
4453       REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
4454     ClassTable* const class_table = class_loader->GetClassTable();
4455     ObjPtr<mirror::Class> klass = class_table->Lookup(descriptor_, hash_);
4456     // Add `klass` only if `class_loader` is its defining (not just initiating) class loader.
4457     if (klass != nullptr && klass->GetClassLoader() == class_loader) {
4458       result_->push_back(klass);
4459     }
4460   }
4461 
4462  private:
4463   const char* const descriptor_;
4464   const size_t hash_;
4465   std::vector<ObjPtr<mirror::Class>>* const result_;
4466 };
4467 
4468 void ClassLinker::LookupClasses(const char* descriptor,
4469                                 std::vector<ObjPtr<mirror::Class>>& result) {
4470   result.clear();
4471   Thread* const self = Thread::Current();
4472   ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
4473   const size_t hash = ComputeModifiedUtf8Hash(descriptor);
4474   ObjPtr<mirror::Class> klass = boot_class_table_->Lookup(descriptor, hash);
4475   if (klass != nullptr) {
4476     DCHECK(klass->GetClassLoader() == nullptr);
4477     result.push_back(klass);
4478   }
4479   LookupClassesVisitor visitor(descriptor, hash, &result);
4480   VisitClassLoaders(&visitor);
4481 }
4482 
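// Verifies `supertype` (a superclass or a default-method interface of `klass`) if needed.
// Returns true if the supertype is verified or may be re-verified at runtime. On a hard
// failure it throws a VerifyError for `klass`, marks `klass` as kErrorResolved, notifies the
// compiler callbacks when running under the AOT compiler, and returns false.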
4483 bool ClassLinker::AttemptSupertypeVerification(Thread* self,
4484                                                verifier::VerifierDeps* verifier_deps,
4485                                                Handle<mirror::Class> klass,
4486                                                Handle<mirror::Class> supertype) {
4487   DCHECK(self != nullptr);
4488   DCHECK(klass != nullptr);
4489   DCHECK(supertype != nullptr);
4490 
4491   if (!supertype->IsVerified() && !supertype->IsErroneous()) {
4492     VerifyClass(self, verifier_deps, supertype);
4493   }
4494 
4495   if (supertype->IsVerified()
4496       || supertype->ShouldVerifyAtRuntime()
4497       || supertype->IsVerifiedNeedsAccessChecks()) {
4498     // The supertype is either verified, or we soft failed at AOT time.
4499     DCHECK(supertype->IsVerified() || Runtime::Current()->IsAotCompiler());
4500     return true;
4501   }
4502   // If we got this far then we have a hard failure.
4503   std::string error_msg =
4504       StringPrintf("Rejecting class %s that attempts to sub-type erroneous class %s",
4505                    klass->PrettyDescriptor().c_str(),
4506                    supertype->PrettyDescriptor().c_str());
4507   LOG(WARNING) << error_msg << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8();
4508   StackHandleScope<1> hs(self);
4509   Handle<mirror::Throwable> cause(hs.NewHandle(self->GetException()));
4510   if (cause != nullptr) {
4511     // Set during VerifyClass call (if at all).
4512     self->ClearException();
4513   }
4514   // Change into a verify error.
4515   ThrowVerifyError(klass.Get(), "%s", error_msg.c_str());
4516   if (cause != nullptr) {
4517     self->GetException()->SetCause(cause.Get());
4518   }
4519   ClassReference ref(klass->GetDexCache()->GetDexFile(), klass->GetDexClassDefIndex());
4520   if (Runtime::Current()->IsAotCompiler()) {
4521     Runtime::Current()->GetCompilerCallbacks()->ClassRejected(ref);
4522   }
4523   // Need to grab the lock to change status.
4524   ObjectLock<mirror::Class> super_lock(self, klass);
4525   mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
4526   return false;
4527 }
4528 
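// Verifies `klass`, first verifying its superclass and any default-method superinterfaces,
// and uses the oat/vdex status to skip work where possible. On a soft failure under the AOT
// compiler the class is typically marked kRetryVerificationAtRuntime; on a hard failure a
// VerifyError is thrown and the class is marked erroneous.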
4529 verifier::FailureKind ClassLinker::VerifyClass(Thread* self,
4530                                                verifier::VerifierDeps* verifier_deps,
4531                                                Handle<mirror::Class> klass,
4532                                                verifier::HardFailLogMode log_level) {
4533   {
4534     // TODO: assert that the monitor on the Class is held
4535     ObjectLock<mirror::Class> lock(self, klass);
4536 
4537     // Is somebody verifying this now?
4538     ClassStatus old_status = klass->GetStatus();
4539     while (old_status == ClassStatus::kVerifying) {
4540       lock.WaitIgnoringInterrupts();
4541       // WaitIgnoringInterrupts can still receive an interrupt and return early; in this
4542       // case we may see the same status again. b/62912904. This is why the check is
4543       // greater than or equal.
4544       CHECK(klass->IsErroneous() || (klass->GetStatus() >= old_status))
4545           << "Class '" << klass->PrettyClass()
4546           << "' performed an illegal verification state transition from " << old_status
4547           << " to " << klass->GetStatus();
4548       old_status = klass->GetStatus();
4549     }
4550 
4551     // The class might already be erroneous, for example at compile time if we attempted to verify
4552     // this class as a parent to another.
4553     if (klass->IsErroneous()) {
4554       ThrowEarlierClassFailure(klass.Get());
4555       return verifier::FailureKind::kHardFailure;
4556     }
4557 
4558     // Don't attempt to re-verify if already verified.
4559     if (klass->IsVerified()) {
4560       EnsureSkipAccessChecksMethods(klass, image_pointer_size_);
4561       if (verifier_deps != nullptr &&
4562           verifier_deps->ContainsDexFile(klass->GetDexFile()) &&
4563           !verifier_deps->HasRecordedVerifiedStatus(klass->GetDexFile(), *klass->GetClassDef()) &&
4564           !Runtime::Current()->IsAotCompiler()) {
4565         // If the klass is verified, but `verifier_deps` did not record it, this
4566         // means we are running background verification of a secondary dex file.
4567         // Re-run the verifier to populate `verifier_deps`.
4568         // No need to run the verification when running on the AOT Compiler, as
4569         // the driver handles those multithreaded cases already.
4570         std::string error_msg;
4571         verifier::FailureKind failure =
4572             PerformClassVerification(self, verifier_deps, klass, log_level, &error_msg);
4573         // We could have soft failures, so just check that we don't have a hard
4574         // failure.
4575         DCHECK_NE(failure, verifier::FailureKind::kHardFailure) << error_msg;
4576       }
4577       return verifier::FailureKind::kNoFailure;
4578     }
4579 
4580     if (klass->IsVerifiedNeedsAccessChecks()) {
4581       if (!Runtime::Current()->IsAotCompiler()) {
4582         // Mark the class as having a verification attempt to avoid re-running
4583         // the verifier and avoid calling EnsureSkipAccessChecksMethods.
4584         klass->SetVerificationAttempted();
4585         mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
4586       }
4587       return verifier::FailureKind::kAccessChecksFailure;
4588     }
4589 
4590     // For AOT, don't attempt to re-verify if we have already found we should
4591     // verify at runtime.
4592     if (klass->ShouldVerifyAtRuntime()) {
4593       CHECK(Runtime::Current()->IsAotCompiler());
4594       return verifier::FailureKind::kSoftFailure;
4595     }
4596 
4597     DCHECK_EQ(klass->GetStatus(), ClassStatus::kResolved);
4598     mirror::Class::SetStatus(klass, ClassStatus::kVerifying, self);
4599 
4600     // Skip verification if disabled.
4601     if (!Runtime::Current()->IsVerificationEnabled()) {
4602       mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
4603       EnsureSkipAccessChecksMethods(klass, image_pointer_size_);
4604       return verifier::FailureKind::kNoFailure;
4605     }
4606   }
4607 
4608   VLOG(class_linker) << "Beginning verification for class: "
4609                      << klass->PrettyDescriptor()
4610                      << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8();
4611 
4612   // Verify super class.
4613   StackHandleScope<2> hs(self);
4614   MutableHandle<mirror::Class> supertype(hs.NewHandle(klass->GetSuperClass()));
4615   // If we have a superclass and we get a hard verification failure we can return immediately.
4616   if (supertype != nullptr &&
4617       !AttemptSupertypeVerification(self, verifier_deps, klass, supertype)) {
4618     CHECK(self->IsExceptionPending()) << "Verification error should be pending.";
4619     return verifier::FailureKind::kHardFailure;
4620   }
4621 
4622   // Verify all default super-interfaces.
4623   //
4624   // (1) Don't bother if the superclass has already had a soft verification failure.
4625   //
4626   // (2) Interfaces shouldn't bother to do this recursive verification because they cannot cause
4627   //     recursive initialization by themselves. This is because when an interface is initialized
4628   //     directly it must not initialize its superinterfaces. We are allowed to verify regardless
4629   //     but choose not to for an optimization. If the interface is being verified due to a class
4630   //     initialization (which would need all the default interfaces to be verified) the class code
4631   //     will trigger the recursive verification anyway.
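  // Example: if klass implements interface I that declares default methods and I is not yet
  // verified, I is verified here; a hard failure in I rejects klass, while a soft failure
  // leaves I in `supertype` so the status propagation below can see it.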
4632   if ((supertype == nullptr || supertype->IsVerified())  // See (1)
4633       && !klass->IsInterface()) {                              // See (2)
4634     int32_t iftable_count = klass->GetIfTableCount();
4635     MutableHandle<mirror::Class> iface(hs.NewHandle<mirror::Class>(nullptr));
4636     // Loop through all interfaces this class has defined. The order doesn't matter.
4637     for (int32_t i = 0; i < iftable_count; i++) {
4638       iface.Assign(klass->GetIfTable()->GetInterface(i));
4639       DCHECK(iface != nullptr);
4640       // We only care if we have default interfaces and can skip if we are already verified...
4641       if (LIKELY(!iface->HasDefaultMethods() || iface->IsVerified())) {
4642         continue;
4643       } else if (UNLIKELY(!AttemptSupertypeVerification(self, verifier_deps, klass, iface))) {
4644         // We had a hard failure while verifying this interface. Just return immediately.
4645         CHECK(self->IsExceptionPending()) << "Verification error should be pending.";
4646         return verifier::FailureKind::kHardFailure;
4647       } else if (UNLIKELY(!iface->IsVerified())) {
4648         // We softly failed to verify the iface. Stop checking and clean up.
4649         // Put the iface into the supertype handle so we know what caused us to fail.
4650         supertype.Assign(iface.Get());
4651         break;
4652       }
4653     }
4654   }
4655 
4656   // At this point if verification failed, then supertype is the "first" supertype that failed
4657   // verification (without a specific order). If verification succeeded, then supertype is either
4658   // null or the original superclass of klass and is verified.
4659   DCHECK(supertype == nullptr ||
4660          supertype.Get() == klass->GetSuperClass() ||
4661          !supertype->IsVerified());
4662 
4663   // Try to use verification information from the oat file, otherwise do runtime verification.
4664   const DexFile& dex_file = *klass->GetDexCache()->GetDexFile();
4665   ClassStatus oat_file_class_status(ClassStatus::kNotReady);
4666   bool preverified = VerifyClassUsingOatFile(self, dex_file, klass, oat_file_class_status);
4667 
4668   VLOG(class_linker) << "Class preverified status for class "
4669                      << klass->PrettyDescriptor()
4670                      << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8()
4671                      << ": "
4672                      << preverified
4673                      << " (" << oat_file_class_status << ")";
4674 
4675   // If the oat file says the class had an error, re-run the verifier. That way we will get a
4676   // precise error message. To ensure a rerun, test:
4677   //     mirror::Class::IsErroneous(oat_file_class_status) => !preverified
4678   DCHECK(!mirror::Class::IsErroneous(oat_file_class_status) || !preverified);
4679 
4680   std::string error_msg;
4681   verifier::FailureKind verifier_failure = verifier::FailureKind::kNoFailure;
4682   if (!preverified) {
4683     verifier_failure = PerformClassVerification(self, verifier_deps, klass, log_level, &error_msg);
4684   }
4685 
4686   // Verification is done, grab the lock again.
4687   ObjectLock<mirror::Class> lock(self, klass);
4688 
4689   if (preverified || verifier_failure != verifier::FailureKind::kHardFailure) {
4690     if (!preverified && verifier_failure != verifier::FailureKind::kNoFailure) {
4691       VLOG(class_linker) << "Soft verification failure in class "
4692                          << klass->PrettyDescriptor()
4693                          << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8()
4694                          << " because: " << error_msg;
4695     }
4696     self->AssertNoPendingException();
4697     // Make sure all classes referenced by catch blocks are resolved.
4698     ResolveClassExceptionHandlerTypes(klass);
4699     if (verifier_failure == verifier::FailureKind::kNoFailure) {
4700       // Even though there were no verifier failures we need to respect whether the super-class and
4701       // super-default-interfaces were verified or require runtime re-verification.
4702       if (supertype == nullptr
4703           || supertype->IsVerified()
4704           || supertype->IsVerifiedNeedsAccessChecks()) {
4705         mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
4706       } else {
4707         CHECK(Runtime::Current()->IsAotCompiler());
4708         CHECK_EQ(supertype->GetStatus(), ClassStatus::kRetryVerificationAtRuntime);
4709         mirror::Class::SetStatus(klass, ClassStatus::kRetryVerificationAtRuntime, self);
4710         // Pretend a soft failure occurred so that we don't consider the class verified below.
4711         verifier_failure = verifier::FailureKind::kSoftFailure;
4712       }
4713     } else {
4714       CHECK(verifier_failure == verifier::FailureKind::kSoftFailure ||
4715             verifier_failure == verifier::FailureKind::kTypeChecksFailure ||
4716             verifier_failure == verifier::FailureKind::kAccessChecksFailure);
4717       // Soft failures at compile time should be retried at runtime. Soft
4718       // failures at runtime will be handled by slow paths in the generated
4719       // code. Set status accordingly.
4720       if (Runtime::Current()->IsAotCompiler()) {
4721         if (verifier_failure == verifier::FailureKind::kSoftFailure ||
4722             verifier_failure == verifier::FailureKind::kTypeChecksFailure) {
4723           mirror::Class::SetStatus(klass, ClassStatus::kRetryVerificationAtRuntime, self);
4724         } else {
4725           mirror::Class::SetStatus(klass, ClassStatus::kVerifiedNeedsAccessChecks, self);
4726         }
4727       } else {
4728         mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
4729         // As this is a fake verified status, make sure the methods are _not_ marked
4730         // kAccSkipAccessChecks later.
4731         klass->SetVerificationAttempted();
4732       }
4733     }
4734   } else {
4735     VLOG(verifier) << "Verification failed on class " << klass->PrettyDescriptor()
4736                   << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8()
4737                   << " because: " << error_msg;
4738     self->AssertNoPendingException();
4739     ThrowVerifyError(klass.Get(), "%s", error_msg.c_str());
4740     mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
4741   }
4742   if (preverified || verifier_failure == verifier::FailureKind::kNoFailure) {
4743     if (oat_file_class_status == ClassStatus::kVerifiedNeedsAccessChecks ||
4744         UNLIKELY(Runtime::Current()->IsVerificationSoftFail())) {
4745       // Never skip access checks if the verification soft fail is forced.
4746       // Mark the class as having a verification attempt to avoid re-running the verifier.
4747       klass->SetVerificationAttempted();
4748     } else {
4749       // Class is verified so we don't need to do any access check on its methods.
4750       // Let the interpreter know it by setting the kAccSkipAccessChecks flag onto each
4751       // method.
4752       // Note: we're going here during compilation and at runtime. When we set the
4753       // kAccSkipAccessChecks flag when compiling image classes, the flag is recorded
4754       // in the image and is set when loading the image.
4755       EnsureSkipAccessChecksMethods(klass, image_pointer_size_);
4756     }
4757   }
4758   // Done verifying. Notify the compiler about the verification status, in case the class
4759   // was verified implicitly (eg super class of a compiled class).
4760   if (Runtime::Current()->IsAotCompiler()) {
4761     Runtime::Current()->GetCompilerCallbacks()->UpdateClassState(
4762         ClassReference(&klass->GetDexFile(), klass->GetDexClassDefIndex()), klass->GetStatus());
4763   }
4764   return verifier_failure;
4765 }
4766 
4767 verifier::FailureKind ClassLinker::PerformClassVerification(Thread* self,
4768                                                             verifier::VerifierDeps* verifier_deps,
4769                                                             Handle<mirror::Class> klass,
4770                                                             verifier::HardFailLogMode log_level,
4771                                                             std::string* error_msg) {
4772   Runtime* const runtime = Runtime::Current();
4773   return verifier::ClassVerifier::VerifyClass(self,
4774                                               verifier_deps,
4775                                               klass.Get(),
4776                                               runtime->GetCompilerCallbacks(),
4777                                               runtime->IsAotCompiler(),
4778                                               log_level,
4779                                               Runtime::Current()->GetTargetSdkVersion(),
4780                                               error_msg);
4781 }
4782 
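// Checks whether verification of `klass` can be skipped based on the class status recorded in
// the oat file (or computed from the vdex file). Returns true if the recorded status is at
// least kVerifiedNeedsAccessChecks; `oat_file_class_status` is set to the status that was found.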
4783 bool ClassLinker::VerifyClassUsingOatFile(Thread* self,
4784                                           const DexFile& dex_file,
4785                                           Handle<mirror::Class> klass,
4786                                           ClassStatus& oat_file_class_status) {
4787   // If we're compiling, we can only verify the class using the oat file if
4788   // we are not compiling the image or if the class we're verifying is not part of
4789   // the compilation unit (app - dependencies). We will let the compiler callback
4790   // tell us about the latter.
4791   if (Runtime::Current()->IsAotCompiler()) {
4792     CompilerCallbacks* callbacks = Runtime::Current()->GetCompilerCallbacks();
4793     // We are compiling an app (not the image).
4794     if (!callbacks->CanUseOatStatusForVerification(klass.Get())) {
4795       return false;
4796     }
4797   }
4798 
4799   const OatDexFile* oat_dex_file = dex_file.GetOatDexFile();
4800   // In case we run without an image there won't be a backing oat file.
4801   if (oat_dex_file == nullptr || oat_dex_file->GetOatFile() == nullptr) {
4802     return false;
4803   }
4804 
4805   uint16_t class_def_index = klass->GetDexClassDefIndex();
4806   oat_file_class_status = oat_dex_file->GetOatClass(class_def_index).GetStatus();
4807   if (oat_file_class_status >= ClassStatus::kVerified) {
4808     return true;
4809   }
4810   if (oat_file_class_status >= ClassStatus::kVerifiedNeedsAccessChecks) {
4811     // We return that the class has already been verified, and the caller should
4812     // check the class status to ensure we run with access checks.
4813     return true;
4814   }
4815 
4816   // Check the class status with the vdex file.
4817   const OatFile* oat_file = oat_dex_file->GetOatFile();
4818   if (oat_file != nullptr) {
4819     oat_file_class_status = oat_file->GetVdexFile()->ComputeClassStatus(self, klass);
4820     if (oat_file_class_status >= ClassStatus::kVerifiedNeedsAccessChecks) {
4821       return true;
4822     }
4823   }
4824 
4825   // If we only verified a subset of the classes at compile time, we can end up with classes that
4826   // were resolved by the verifier.
4827   if (oat_file_class_status == ClassStatus::kResolved) {
4828     return false;
4829   }
4830   // We never expect a .oat file to have kRetryVerificationAtRuntime statuses.
4831   CHECK_NE(oat_file_class_status, ClassStatus::kRetryVerificationAtRuntime)
4832       << klass->PrettyClass() << " " << dex_file.GetLocation();
4833 
4834   if (mirror::Class::IsErroneous(oat_file_class_status)) {
4835     // Compile time verification failed with a hard error. This is caused by invalid instructions
4836     // in the class. These errors are unrecoverable.
4837     return false;
4838   }
4839   if (oat_file_class_status == ClassStatus::kNotReady) {
4840     // Status is uninitialized if we couldn't determine the status at compile time, for example
4841     // because the class was not loaded during compilation.
4842     // TODO: when the verifier doesn't rely on Class-es failing to resolve/load, the type hierarchy
4843     // isn't a problem and this case shouldn't occur.
4844     return false;
4845   }
4846   std::string temp;
4847   LOG(FATAL) << "Unexpected class status: " << oat_file_class_status
4848              << " " << dex_file.GetLocation() << " " << klass->PrettyClass() << " "
4849              << klass->GetDescriptor(&temp);
4850   UNREACHABLE();
4851 }
4852 
4853 void ClassLinker::ResolveClassExceptionHandlerTypes(Handle<mirror::Class> klass) {
4854   for (ArtMethod& method : klass->GetMethods(image_pointer_size_)) {
4855     ResolveMethodExceptionHandlerTypes(&method);
4856   }
4857 }
4858 
4859 void ClassLinker::ResolveMethodExceptionHandlerTypes(ArtMethod* method) {
4860   // similar to DexVerifier::ScanTryCatchBlocks and dex2oat's ResolveExceptionsForMethod.
4861   CodeItemDataAccessor accessor(method->DexInstructionData());
4862   if (!accessor.HasCodeItem()) {
4863     return;  // native or abstract method
4864   }
4865   if (accessor.TriesSize() == 0) {
4866     return;  // nothing to process
4867   }
4868   const uint8_t* handlers_ptr = accessor.GetCatchHandlerData(0);
4869   uint32_t handlers_size = DecodeUnsignedLeb128(&handlers_ptr);
4870   for (uint32_t idx = 0; idx < handlers_size; idx++) {
4871     CatchHandlerIterator iterator(handlers_ptr);
4872     for (; iterator.HasNext(); iterator.Next()) {
4873       // Ensure exception types are resolved so that they don't need resolution to be delivered;
4874       // unresolved exception types will be ignored by exception delivery.
4875       if (iterator.GetHandlerTypeIndex().IsValid()) {
4876         ObjPtr<mirror::Class> exception_type = ResolveType(iterator.GetHandlerTypeIndex(), method);
4877         if (exception_type == nullptr) {
4878           DCHECK(Thread::Current()->IsExceptionPending());
4879           Thread::Current()->ClearException();
4880         }
4881       }
4882     }
4883     handlers_ptr = iterator.EndDataPointer();
4884   }
4885 }
4886 
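// Creates the runtime class for a java.lang.reflect.Proxy subclass. The generated class has
// exactly two static fields ('interfaces' and 'throws'), one direct method (the constructor)
// and one virtual method per proxied interface method; private and static methods in the
// input array are filtered out.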
4887 ObjPtr<mirror::Class> ClassLinker::CreateProxyClass(ScopedObjectAccessAlreadyRunnable& soa,
4888                                                     jstring name,
4889                                                     jobjectArray interfaces,
4890                                                     jobject loader,
4891                                                     jobjectArray methods,
4892                                                     jobjectArray throws) {
4893   Thread* self = soa.Self();
4894 
4895   // This is to prevent the calls to ClassLoad and ClassPrepare which can cause java/user-supplied
4896   // code to be executed. We put it up here so we can avoid all the allocations associated with
4897   // creating the class. This can happen with (eg) jit-threads.
4898   if (!self->CanLoadClasses()) {
4899     // Make sure we don't try to load anything, potentially causing an infinite loop.
4900     ObjPtr<mirror::Throwable> pre_allocated =
4901         Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
4902     self->SetException(pre_allocated);
4903     return nullptr;
4904   }
4905 
4906   StackHandleScope<12> hs(self);
4907   MutableHandle<mirror::Class> temp_klass(hs.NewHandle(
4908       AllocClass(self, GetClassRoot<mirror::Class>(this), sizeof(mirror::Class))));
4909   if (temp_klass == nullptr) {
4910     CHECK(self->IsExceptionPending());  // OOME.
4911     return nullptr;
4912   }
4913   DCHECK(temp_klass->GetClass() != nullptr);
4914   temp_klass->SetObjectSize(sizeof(mirror::Proxy));
4915   // Set the class access flags incl. VerificationAttempted, so we do not try to set the flag on
4916   // the methods.
4917   temp_klass->SetAccessFlagsDuringLinking(
4918       kAccClassIsProxy | kAccPublic | kAccFinal | kAccVerificationAttempted);
4919   temp_klass->SetClassLoader(soa.Decode<mirror::ClassLoader>(loader));
4920   DCHECK_EQ(temp_klass->GetPrimitiveType(), Primitive::kPrimNot);
4921   temp_klass->SetName(soa.Decode<mirror::String>(name));
4922   temp_klass->SetDexCache(GetClassRoot<mirror::Proxy>(this)->GetDexCache());
4923   // Object has an empty iftable, copy it for that reason.
4924   temp_klass->SetIfTable(GetClassRoot<mirror::Object>(this)->GetIfTable());
4925   mirror::Class::SetStatus(temp_klass, ClassStatus::kIdx, self);
4926   std::string storage;
4927   const char* descriptor = temp_klass->GetDescriptor(&storage);
4928   const size_t hash = ComputeModifiedUtf8Hash(descriptor);
4929 
4930   // Needs to be before we insert the class so that the allocator field is set.
4931   LinearAlloc* const allocator = GetOrCreateAllocatorForClassLoader(temp_klass->GetClassLoader());
4932 
4933   // Insert the class before loading the fields as the field roots
4934   // (ArtField::declaring_class_) are only visited from the class
4935   // table. There can't be any suspend points between inserting the
4936   // class and setting the field arrays below.
4937   ObjPtr<mirror::Class> existing = InsertClass(descriptor, temp_klass.Get(), hash);
4938   CHECK(existing == nullptr);
4939 
4940   // Instance fields are inherited, but we add a couple of static fields...
4941   const size_t num_fields = 2;
4942   LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self, allocator, num_fields);
4943   temp_klass->SetSFieldsPtr(sfields);
4944 
4945   // 1. Create a static field 'interfaces' that holds the _declared_ interfaces implemented by
4946   // our proxy, so Class.getInterfaces doesn't return the flattened set.
4947   ArtField& interfaces_sfield = sfields->At(0);
4948   interfaces_sfield.SetDexFieldIndex(0);
4949   interfaces_sfield.SetDeclaringClass(temp_klass.Get());
4950   interfaces_sfield.SetAccessFlags(kAccStatic | kAccPublic | kAccFinal);
4951 
4952   // 2. Create a static field 'throws' that holds exceptions thrown by our methods.
4953   ArtField& throws_sfield = sfields->At(1);
4954   throws_sfield.SetDexFieldIndex(1);
4955   throws_sfield.SetDeclaringClass(temp_klass.Get());
4956   throws_sfield.SetAccessFlags(kAccStatic | kAccPublic | kAccFinal);
4957 
4958   // Proxies have 1 direct method, the constructor
4959   const size_t num_direct_methods = 1;
4960 
4961   // The array we get passed contains all methods, including private and static
4962   // ones that aren't proxied. We need to filter those out since only interface
4963   // methods (non-private & virtual) are actually proxied.
4964   Handle<mirror::ObjectArray<mirror::Method>> h_methods =
4965       hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::Method>>(methods));
4966   DCHECK_EQ(h_methods->GetClass(), GetClassRoot<mirror::ObjectArray<mirror::Method>>())
4967       << mirror::Class::PrettyClass(h_methods->GetClass());
4968   // List of the actual virtual methods this class will have.
4969   std::vector<ArtMethod*> proxied_methods;
4970   std::vector<size_t> proxied_throws_idx;
4971   proxied_methods.reserve(h_methods->GetLength());
4972   proxied_throws_idx.reserve(h_methods->GetLength());
4973   // Filter out to only the non-private virtual methods.
4974   for (auto [mirror, idx] : ZipCount(h_methods.Iterate<mirror::Method>())) {
4975     ArtMethod* m = mirror->GetArtMethod();
4976     if (!m->IsPrivate() && !m->IsStatic()) {
4977       proxied_methods.push_back(m);
4978       proxied_throws_idx.push_back(idx);
4979     }
4980   }
4981   const size_t num_virtual_methods = proxied_methods.size();
4982   // We also need to filter out the 'throws'. The 'throws' are a Class[][] that
4983   // contains an array of all the classes each function is declared to throw.
4984   // This is used to wrap unexpected exceptions in a
4985   // UndeclaredThrowableException exception. This array is in the same order as
4986   // the methods array and like the methods array must be filtered to remove any
4987   // non-proxied methods.
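  // For example, if `methods` is [equals, privateHelper (private), hashCode] then
  // proxied_methods is [equals, hashCode] and the throws array is compacted from
  // [t0, t1, t2] to [t0, t2] using the recorded original indices.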
4988   const bool has_filtered_methods =
4989       static_cast<int32_t>(num_virtual_methods) != h_methods->GetLength();
4990   MutableHandle<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>> original_proxied_throws(
4991       hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>>(throws)));
4992   MutableHandle<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>> proxied_throws(
4993       hs.NewHandle<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>>(
4994           (has_filtered_methods)
4995               ? mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>::Alloc(
4996                     self, original_proxied_throws->GetClass(), num_virtual_methods)
4997               : original_proxied_throws.Get()));
4998   if (proxied_throws.IsNull() && !original_proxied_throws.IsNull()) {
4999     self->AssertPendingOOMException();
5000     return nullptr;
5001   }
5002   if (has_filtered_methods) {
5003     for (auto [orig_idx, new_idx] : ZipCount(MakeIterationRange(proxied_throws_idx))) {
5004       DCHECK_LE(new_idx, orig_idx);
5005       proxied_throws->Set(new_idx, original_proxied_throws->Get(orig_idx));
5006     }
5007   }
5008 
5009   // Create the methods array.
5010   LengthPrefixedArray<ArtMethod>* proxy_class_methods = AllocArtMethodArray(
5011         self, allocator, num_direct_methods + num_virtual_methods);
5012   // Currently AllocArtMethodArray cannot return null, but the OOM logic is left there in case we
5013   // want to throw OOM in the future.
5014   if (UNLIKELY(proxy_class_methods == nullptr)) {
5015     self->AssertPendingOOMException();
5016     return nullptr;
5017   }
5018   temp_klass->SetMethodsPtr(proxy_class_methods, num_direct_methods, num_virtual_methods);
5019 
5020   // Create the single direct method.
5021   CreateProxyConstructor(temp_klass, temp_klass->GetDirectMethodUnchecked(0, image_pointer_size_));
5022 
5023   // Create virtual method using specified prototypes.
5024   // TODO These should really use the iterators.
5025   for (size_t i = 0; i < num_virtual_methods; ++i) {
5026     auto* virtual_method = temp_klass->GetVirtualMethodUnchecked(i, image_pointer_size_);
5027     auto* prototype = proxied_methods[i];
5028     CreateProxyMethod(temp_klass, prototype, virtual_method);
5029     DCHECK(virtual_method->GetDeclaringClass() != nullptr);
5030     DCHECK(prototype->GetDeclaringClass() != nullptr);
5031   }
5032 
5033   // The super class is java.lang.reflect.Proxy
5034   temp_klass->SetSuperClass(GetClassRoot<mirror::Proxy>(this));
5035   // Now effectively in the loaded state.
5036   mirror::Class::SetStatus(temp_klass, ClassStatus::kLoaded, self);
5037   self->AssertNoPendingException();
5038 
5039   // At this point the class is loaded. Publish a ClassLoad event.
5040   // Note: this may be a temporary class. It is a listener's responsibility to handle this.
5041   Runtime::Current()->GetRuntimeCallbacks()->ClassLoad(temp_klass);
5042 
5043   MutableHandle<mirror::Class> klass = hs.NewHandle<mirror::Class>(nullptr);
5044   {
5045     // Must hold lock on object when resolved.
5046     ObjectLock<mirror::Class> resolution_lock(self, temp_klass);
5047     // Link the fields and virtual methods, creating vtable and iftables.
5048     // The new class will replace the old one in the class table.
5049     Handle<mirror::ObjectArray<mirror::Class>> h_interfaces(
5050         hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::Class>>(interfaces)));
5051     if (!LinkClass(self, descriptor, temp_klass, h_interfaces, &klass)) {
5052       if (!temp_klass->IsErroneous()) {
5053         mirror::Class::SetStatus(temp_klass, ClassStatus::kErrorUnresolved, self);
5054       }
5055       return nullptr;
5056     }
5057   }
5058   CHECK(temp_klass->IsRetired());
5059   CHECK_NE(temp_klass.Get(), klass.Get());
5060 
5061   CHECK_EQ(interfaces_sfield.GetDeclaringClass(), klass.Get());
5062   interfaces_sfield.SetObject<false>(
5063       klass.Get(),
5064       soa.Decode<mirror::ObjectArray<mirror::Class>>(interfaces));
5065   CHECK_EQ(throws_sfield.GetDeclaringClass(), klass.Get());
5066   throws_sfield.SetObject<false>(
5067       klass.Get(),
5068       proxied_throws.Get());
5069 
5070   Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(temp_klass, klass);
5071 
5072   // SubtypeCheckInfo::Initialized must happen-before any new-instance for that type.
5073   // See also ClassLinker::EnsureInitialized().
5074   if (kBitstringSubtypeCheckEnabled) {
5075     MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
5076     SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(klass.Get());
5077     // TODO: Avoid taking subtype_check_lock_ if SubtypeCheck for j.l.r.Proxy is already assigned.
5078   }
5079 
5080   VisiblyInitializedCallback* callback = nullptr;
5081   {
5082     // Lock on klass is released. Lock new class object.
5083     ObjectLock<mirror::Class> initialization_lock(self, klass);
5084     EnsureSkipAccessChecksMethods(klass, image_pointer_size_);
5085     // Conservatively go through the ClassStatus::kInitialized state.
5086     callback = MarkClassInitialized(self, klass);
5087   }
5088   if (callback != nullptr) {
5089     callback->MakeVisible(self);
5090   }
5091 
5092   // Consistency checks.
5093   if (kIsDebugBuild) {
5094     CHECK(klass->GetIFieldsPtr() == nullptr);
5095     CheckProxyConstructor(klass->GetDirectMethod(0, image_pointer_size_));
5096 
5097     for (size_t i = 0; i < num_virtual_methods; ++i) {
5098       auto* virtual_method = klass->GetVirtualMethodUnchecked(i, image_pointer_size_);
5099       CheckProxyMethod(virtual_method, proxied_methods[i]);
5100     }
5101 
5102     StackHandleScope<1> hs2(self);
5103     Handle<mirror::String> decoded_name = hs2.NewHandle(soa.Decode<mirror::String>(name));
5104     std::string interfaces_field_name(StringPrintf("java.lang.Class[] %s.interfaces",
5105                                                    decoded_name->ToModifiedUtf8().c_str()));
5106     CHECK_EQ(ArtField::PrettyField(klass->GetStaticField(0)), interfaces_field_name);
5107 
5108     std::string throws_field_name(StringPrintf("java.lang.Class[][] %s.throws",
5109                                                decoded_name->ToModifiedUtf8().c_str()));
5110     CHECK_EQ(ArtField::PrettyField(klass->GetStaticField(1)), throws_field_name);
5111 
5112     CHECK_EQ(klass.Get()->GetProxyInterfaces(),
5113              soa.Decode<mirror::ObjectArray<mirror::Class>>(interfaces));
5114     CHECK_EQ(klass.Get()->GetProxyThrows(),
5115              proxied_throws.Get());
5116   }
5117   return klass.Get();
5118 }
5119 
5120 void ClassLinker::CreateProxyConstructor(Handle<mirror::Class> klass, ArtMethod* out) {
5121   // Create constructor for Proxy that must initialize the method.
5122   ObjPtr<mirror::Class> proxy_class = GetClassRoot<mirror::Proxy>(this);
5123   CHECK_EQ(proxy_class->NumDirectMethods(), 21u);
5124 
5125   // Find the <init>(InvocationHandler)V method. The exact method offset varies depending
5126   // on which front-end compiler was used to build the libcore DEX files.
5127   ArtMethod* proxy_constructor =
5128       jni::DecodeArtMethod(WellKnownClasses::java_lang_reflect_Proxy_init);
5129   DCHECK(proxy_constructor != nullptr)
5130       << "Could not find <init> method in java.lang.reflect.Proxy";
5131 
5132   // Clone the existing constructor of Proxy (our constructor would just invoke it so steal its
5133   // code_ too)
5134   DCHECK(out != nullptr);
5135   out->CopyFrom(proxy_constructor, image_pointer_size_);
5136   // Make this constructor public and fix the class to be our Proxy version.
5137   // Mark kAccCompileDontBother so that we don't take JIT samples for the method. b/62349349
5138   // Note that the compiler calls a ResolveMethod() overload that does not handle a Proxy referrer.
5139   out->SetAccessFlags((out->GetAccessFlags() & ~kAccProtected) |
5140                       kAccPublic |
5141                       kAccCompileDontBother);
5142   out->SetDeclaringClass(klass.Get());
5143 
5144   // Set the original constructor method.
5145   out->SetDataPtrSize(proxy_constructor, image_pointer_size_);
5146 }
5147 
5148 void ClassLinker::CheckProxyConstructor(ArtMethod* constructor) const {
5149   CHECK(constructor->IsConstructor());
5150   auto* np = constructor->GetInterfaceMethodIfProxy(image_pointer_size_);
5151   CHECK_STREQ(np->GetName(), "<init>");
5152   CHECK_STREQ(np->GetSignature().ToString().c_str(), "(Ljava/lang/reflect/InvocationHandler;)V");
5153   DCHECK(constructor->IsPublic());
5154 }
5155 
5156 void ClassLinker::CreateProxyMethod(Handle<mirror::Class> klass, ArtMethod* prototype,
5157                                     ArtMethod* out) {
5158   // We steal everything from the prototype (such as DexCache, invoke stub, etc.) then specialize
5159   // as necessary
5160   DCHECK(out != nullptr);
5161   out->CopyFrom(prototype, image_pointer_size_);
5162 
5163   // Set class to be the concrete proxy class.
5164   out->SetDeclaringClass(klass.Get());
5165   // Clear the abstract and default flags to ensure that defaults aren't picked in
5166   // preference to the invocation handler.
5167   const uint32_t kRemoveFlags = kAccAbstract | kAccDefault;
5168   // Make the method final.
5169   // Mark kAccCompileDontBother so that we don't take JIT samples for the method. b/62349349
5170   const uint32_t kAddFlags = kAccFinal | kAccCompileDontBother;
5171   out->SetAccessFlags((out->GetAccessFlags() & ~kRemoveFlags) | kAddFlags);
5172 
5173   // Set the original interface method.
5174   out->SetDataPtrSize(prototype, image_pointer_size_);
5175 
5176   // At runtime the method looks like a reference and argument saving method, clone the code
5177   // related parameters from this method.
5178   out->SetEntryPointFromQuickCompiledCode(GetQuickProxyInvokeHandler());
5179 }
5180 
5181 void ClassLinker::CheckProxyMethod(ArtMethod* method, ArtMethod* prototype) const {
5182   // Basic consistency checks.
5183   CHECK(!prototype->IsFinal());
5184   CHECK(method->IsFinal());
5185   CHECK(method->IsInvokable());
5186 
5187   // The proxy method doesn't have its own dex cache or dex file and so it steals those of its
5188   // interface prototype. The exception to this are Constructors and the Class of the Proxy itself.
5189   CHECK_EQ(prototype->GetDexMethodIndex(), method->GetDexMethodIndex());
5190   CHECK_EQ(prototype, method->GetInterfaceMethodIfProxy(image_pointer_size_));
5191 }
5192 
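// Returns true if `klass` can be initialized under the given constraints: `can_init_statics`
// controls whether running <clinit> or applying encoded static field values is allowed, and
// `can_init_parents` controls whether superclasses and default-method interfaces may be
// initialized recursively.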
5193 bool ClassLinker::CanWeInitializeClass(ObjPtr<mirror::Class> klass, bool can_init_statics,
5194                                        bool can_init_parents) {
5195   if (can_init_statics && can_init_parents) {
5196     return true;
5197   }
5198   if (!can_init_statics) {
5199     // Check if there's a class initializer.
5200     ArtMethod* clinit = klass->FindClassInitializer(image_pointer_size_);
5201     if (clinit != nullptr) {
5202       return false;
5203     }
5204     // Check if there are encoded static values needing initialization.
5205     if (klass->NumStaticFields() != 0) {
5206       const dex::ClassDef* dex_class_def = klass->GetClassDef();
5207       DCHECK(dex_class_def != nullptr);
5208       if (dex_class_def->static_values_off_ != 0) {
5209         return false;
5210       }
5211     }
5212   }
5213   // If we are a class we need to initialize all interfaces with default methods when we are
5214   // initialized. Check all of them.
5215   if (!klass->IsInterface()) {
5216     size_t num_interfaces = klass->GetIfTableCount();
5217     for (size_t i = 0; i < num_interfaces; i++) {
5218       ObjPtr<mirror::Class> iface = klass->GetIfTable()->GetInterface(i);
5219       if (iface->HasDefaultMethods() && !iface->IsInitialized()) {
5220         if (!can_init_parents || !CanWeInitializeClass(iface, can_init_statics, can_init_parents)) {
5221           return false;
5222         }
5223       }
5224     }
5225   }
5226   if (klass->IsInterface() || !klass->HasSuperClass()) {
5227     return true;
5228   }
5229   ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
5230   if (super_class->IsInitialized()) {
5231     return true;
5232   }
5233   return can_init_parents && CanWeInitializeClass(super_class, can_init_statics, can_init_parents);
5234 }
5235 
5236 bool ClassLinker::InitializeClass(Thread* self,
5237                                   Handle<mirror::Class> klass,
5238                                   bool can_init_statics,
5239                                   bool can_init_parents) {
5240   // see JLS 3rd edition, 12.4.2 "Detailed Initialization Procedure" for the locking protocol
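  // Rough order of operations: bail out early if already initialized or uninitializable,
  // verify if needed, wait if another thread is already running <clinit>, validate superclass
  // descriptors, mark kInitializing, initialize the superclass and default-method interfaces,
  // apply encoded static field values, run <clinit>, and finally publish the resulting status.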
5241 
5242   // Are we already initialized and therefore done?
5243   // Note: we differ from the JLS here as we don't do this under the lock; this is benign as
5244   // an initialized class will never change its state.
5245   if (klass->IsInitialized()) {
5246     return true;
5247   }
5248 
5249   // Fast fail if initialization requires a full runtime. Not part of the JLS.
5250   if (!CanWeInitializeClass(klass.Get(), can_init_statics, can_init_parents)) {
5251     return false;
5252   }
5253 
5254   self->AllowThreadSuspension();
5255   Runtime* const runtime = Runtime::Current();
5256   const bool stats_enabled = runtime->HasStatsEnabled();
5257   uint64_t t0;
5258   {
5259     ObjectLock<mirror::Class> lock(self, klass);
5260 
5261     // Re-check under the lock in case another thread initialized ahead of us.
5262     if (klass->IsInitialized()) {
5263       return true;
5264     }
5265 
5266     // Was the class already found to be erroneous? Done under the lock to match the JLS.
5267     if (klass->IsErroneous()) {
5268       ThrowEarlierClassFailure(klass.Get(), true, /* log= */ true);
5269       VlogClassInitializationFailure(klass);
5270       return false;
5271     }
5272 
5273     CHECK(klass->IsResolved() && !klass->IsErroneousResolved())
5274         << klass->PrettyClass() << ": state=" << klass->GetStatus();
5275 
5276     if (!klass->IsVerified()) {
5277       VerifyClass(self, /*verifier_deps= */ nullptr, klass);
5278       if (!klass->IsVerified()) {
5279         // We failed to verify, expect either the klass to be erroneous or verification failed at
5280         // compile time.
5281         if (klass->IsErroneous()) {
5282           // The class is erroneous. This may be a verifier error, or another thread attempted
5283           // verification and/or initialization and failed. We can distinguish those cases by
5284           // whether an exception is already pending.
5285           if (self->IsExceptionPending()) {
5286             // Check that it's a VerifyError.
5287             DCHECK_EQ("java.lang.Class<java.lang.VerifyError>",
5288                       mirror::Class::PrettyClass(self->GetException()->GetClass()));
5289           } else {
5290             // Check that another thread attempted initialization.
5291             DCHECK_NE(0, klass->GetClinitThreadId());
5292             DCHECK_NE(self->GetTid(), klass->GetClinitThreadId());
5293             // Need to rethrow the previous failure now.
5294             ThrowEarlierClassFailure(klass.Get(), true);
5295           }
5296           VlogClassInitializationFailure(klass);
5297         } else {
5298           CHECK(Runtime::Current()->IsAotCompiler());
5299           CHECK(klass->ShouldVerifyAtRuntime() || klass->IsVerifiedNeedsAccessChecks());
5300           self->AssertNoPendingException();
5301           self->SetException(Runtime::Current()->GetPreAllocatedNoClassDefFoundError());
5302         }
5303         self->AssertPendingException();
5304         return false;
5305       } else {
5306         self->AssertNoPendingException();
5307       }
5308 
5309       // A separate thread could have moved us all the way to initialized. A "simple" example
5310       // involves a subclass of the current class being initialized at the same time (which
5311       // will implicitly initialize the superclass, if scheduled that way). b/28254258
5312       DCHECK(!klass->IsErroneous()) << klass->GetStatus();
5313       if (klass->IsInitialized()) {
5314         return true;
5315       }
5316     }
5317 
5318     // If the class is ClassStatus::kInitializing, either this thread is
5319     // initializing higher up the stack or another thread has beat us
5320     // to initializing and we need to wait. Either way, this
5321     // invocation of InitializeClass will not be responsible for
5322     // running <clinit> and will return.
5323     if (klass->GetStatus() == ClassStatus::kInitializing) {
5324       // Could have got an exception during verification.
5325       if (self->IsExceptionPending()) {
5326         VlogClassInitializationFailure(klass);
5327         return false;
5328       }
5329       // We caught somebody else in the act; was it us?
5330       if (klass->GetClinitThreadId() == self->GetTid()) {
5331         // Yes. That's fine. Return so we can continue initializing.
5332         return true;
5333       }
5334       // No. That's fine. Wait for another thread to finish initializing.
5335       return WaitForInitializeClass(klass, self, lock);
5336     }
5337 
5338     // Try to get the oat class's status for this class if the oat file is present. The compiler
5339     // tries to validate superclass descriptors, and writes the result into the oat file.
5340     // Runtime correctness is guaranteed by classpath checks done on loading. If the classpath
5341     // is different at runtime than it was at compile time, the oat file is rejected. So if the
5342     // oat file is present, the classpaths must match, and the run-time check can be skipped.
5343     bool has_oat_class = false;
5344     const OatFile::OatClass oat_class = (runtime->IsStarted() && !runtime->IsAotCompiler())
5345         ? OatFile::FindOatClass(klass->GetDexFile(), klass->GetDexClassDefIndex(), &has_oat_class)
5346         : OatFile::OatClass::Invalid();
5347     if (oat_class.GetStatus() < ClassStatus::kSuperclassValidated &&
5348         !ValidateSuperClassDescriptors(klass)) {
5349       mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
5350       return false;
5351     }
5352     self->AllowThreadSuspension();
5353 
5354     CHECK_EQ(klass->GetStatus(), ClassStatus::kVerified) << klass->PrettyClass()
5355         << " self.tid=" << self->GetTid() << " clinit.tid=" << klass->GetClinitThreadId();
5356 
5357     // From here out other threads may observe that we're initializing and so changes of state
5358     // require a notification.
5359     klass->SetClinitThreadId(self->GetTid());
5360     mirror::Class::SetStatus(klass, ClassStatus::kInitializing, self);
5361 
5362     t0 = stats_enabled ? NanoTime() : 0u;
5363   }
5364 
5365   uint64_t t_sub = 0;
5366 
5367   // Initialize super classes, must be done while initializing for the JLS.
5368   if (!klass->IsInterface() && klass->HasSuperClass()) {
5369     ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
5370     if (!super_class->IsInitialized()) {
5371       CHECK(!super_class->IsInterface());
5372       CHECK(can_init_parents);
5373       StackHandleScope<1> hs(self);
5374       Handle<mirror::Class> handle_scope_super(hs.NewHandle(super_class));
5375       uint64_t super_t0 = stats_enabled ? NanoTime() : 0u;
5376       bool super_initialized = InitializeClass(self, handle_scope_super, can_init_statics, true);
5377       uint64_t super_t1 = stats_enabled ? NanoTime() : 0u;
5378       if (!super_initialized) {
5379         // The super class was verified ahead of entering initializing, we should only be here if
5380         // the super class became erroneous due to initialization.
5381         // For the case of aot compiler, the super class might also be initializing but we don't
5382         // want to process circular dependencies in pre-compile.
5383         CHECK(self->IsExceptionPending())
5384             << "Super class initialization failed for "
5385             << handle_scope_super->PrettyDescriptor()
5386             << " that has unexpected status " << handle_scope_super->GetStatus()
5387             << "\nPending exception:\n"
5388             << (self->GetException() != nullptr ? self->GetException()->Dump() : "");
5389         ObjectLock<mirror::Class> lock(self, klass);
5390         // Initialization failed because the super-class is erroneous.
5391         mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
5392         return false;
5393       }
5394       t_sub = super_t1 - super_t0;
5395     }
5396   }
5397 
5398   if (!klass->IsInterface()) {
5399     // Initialize interfaces with default methods for the JLS.
5400     size_t num_direct_interfaces = klass->NumDirectInterfaces();
5401     // Only set up the (expensive) handle scope if we actually need to.
5402     if (UNLIKELY(num_direct_interfaces > 0)) {
5403       StackHandleScope<1> hs_iface(self);
5404       MutableHandle<mirror::Class> handle_scope_iface(hs_iface.NewHandle<mirror::Class>(nullptr));
5405       for (size_t i = 0; i < num_direct_interfaces; i++) {
5406         handle_scope_iface.Assign(mirror::Class::GetDirectInterface(self, klass.Get(), i));
5407         CHECK(handle_scope_iface != nullptr) << klass->PrettyDescriptor() << " iface #" << i;
5408         CHECK(handle_scope_iface->IsInterface());
5409         if (handle_scope_iface->HasBeenRecursivelyInitialized()) {
5410           // We have already done this for this interface. Skip it.
5411           continue;
5412         }
5413         // We cannot just call initialize class directly because we need to ensure that ALL
5414         // interfaces with default methods are initialized. Non-default interface initialization
5415         // will not affect other non-default super-interfaces.
5416         // This is not very precise; it misses all the walking.
5417         uint64_t inf_t0 = stats_enabled ? NanoTime() : 0u;
5418         bool iface_initialized = InitializeDefaultInterfaceRecursive(self,
5419                                                                      handle_scope_iface,
5420                                                                      can_init_statics,
5421                                                                      can_init_parents);
5422         uint64_t inf_t1 = stats_enabled ? NanoTime() : 0u;
5423         if (!iface_initialized) {
5424           ObjectLock<mirror::Class> lock(self, klass);
5425           // Initialization failed because one of our interfaces with default methods is erroneous.
5426           mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
5427           return false;
5428         }
5429         t_sub += inf_t1 - inf_t0;
5430       }
5431     }
5432   }
5433 
5434   const size_t num_static_fields = klass->NumStaticFields();
5435   if (num_static_fields > 0) {
5436     const dex::ClassDef* dex_class_def = klass->GetClassDef();
5437     CHECK(dex_class_def != nullptr);
5438     StackHandleScope<3> hs(self);
5439     Handle<mirror::ClassLoader> class_loader(hs.NewHandle(klass->GetClassLoader()));
5440     Handle<mirror::DexCache> dex_cache(hs.NewHandle(klass->GetDexCache()));
5441 
5442     // Eagerly fill in static fields so that we don't have to do as many expensive
5443     // Class::FindStaticField calls in ResolveField.
5444     for (size_t i = 0; i < num_static_fields; ++i) {
5445       ArtField* field = klass->GetStaticField(i);
5446       const uint32_t field_idx = field->GetDexFieldIndex();
5447       ArtField* resolved_field = dex_cache->GetResolvedField(field_idx);
5448       if (resolved_field == nullptr) {
5449         // Populating cache of a dex file which defines `klass` should always be allowed.
5450         DCHECK(!hiddenapi::ShouldDenyAccessToMember(
5451             field,
5452             hiddenapi::AccessContext(class_loader.Get(), dex_cache.Get()),
5453             hiddenapi::AccessMethod::kNone));
5454         dex_cache->SetResolvedField(field_idx, field);
5455       } else {
5456         DCHECK_EQ(field, resolved_field);
5457       }
5458     }
5459 
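    // Apply the encoded static values from the dex file (ClassDef::static_values_off_) to the
    // corresponding static fields, in field-definition order, stopping on a pending exception.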
5460     annotations::RuntimeEncodedStaticFieldValueIterator value_it(dex_cache,
5461                                                                  class_loader,
5462                                                                  this,
5463                                                                  *dex_class_def);
5464     const DexFile& dex_file = *dex_cache->GetDexFile();
5465 
5466     if (value_it.HasNext()) {
5467       ClassAccessor accessor(dex_file, *dex_class_def);
5468       CHECK(can_init_statics);
5469       for (const ClassAccessor::Field& field : accessor.GetStaticFields()) {
5470         if (!value_it.HasNext()) {
5471           break;
5472         }
5473         ArtField* art_field = ResolveField(field.GetIndex(),
5474                                            dex_cache,
5475                                            class_loader,
5476                                            /* is_static= */ true);
5477         if (Runtime::Current()->IsActiveTransaction()) {
5478           value_it.ReadValueToField<true>(art_field);
5479         } else {
5480           value_it.ReadValueToField<false>(art_field);
5481         }
5482         if (self->IsExceptionPending()) {
5483           break;
5484         }
5485         value_it.Next();
5486       }
5487       DCHECK(self->IsExceptionPending() || !value_it.HasNext());
5488     }
5489   }
5490 
5491 
5492   if (!self->IsExceptionPending()) {
5493     ArtMethod* clinit = klass->FindClassInitializer(image_pointer_size_);
5494     if (clinit != nullptr) {
5495       CHECK(can_init_statics);
5496       JValue result;
5497       clinit->Invoke(self, nullptr, 0, &result, "V");
5498     }
5499   }
5500   self->AllowThreadSuspension();
5501   uint64_t t1 = stats_enabled ? NanoTime() : 0u;
5502 
5503   VisiblyInitializedCallback* callback = nullptr;
5504   bool success = true;
5505   {
5506     ObjectLock<mirror::Class> lock(self, klass);
5507 
5508     if (self->IsExceptionPending()) {
5509       WrapExceptionInInitializer(klass);
5510       mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
5511       success = false;
5512     } else if (Runtime::Current()->IsTransactionAborted()) {
5513       // The exception thrown when the transaction aborted has been caught and cleared
5514       // so we need to throw it again now.
5515       VLOG(compiler) << "Return from class initializer of "
5516                      << mirror::Class::PrettyDescriptor(klass.Get())
5517                      << " without exception while transaction was aborted: re-throw it now.";
5518       runtime->ThrowTransactionAbortError(self);
5519       mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
5520       success = false;
5521     } else {
5522       if (stats_enabled) {
5523         RuntimeStats* global_stats = runtime->GetStats();
5524         RuntimeStats* thread_stats = self->GetStats();
5525         ++global_stats->class_init_count;
5526         ++thread_stats->class_init_count;
5527         global_stats->class_init_time_ns += (t1 - t0 - t_sub);
5528         thread_stats->class_init_time_ns += (t1 - t0 - t_sub);
5529       }
5530       // Set the class as initialized except if we failed to initialize static fields.
5531       callback = MarkClassInitialized(self, klass);
5532       if (VLOG_IS_ON(class_linker)) {
5533         std::string temp;
5534         LOG(INFO) << "Initialized class " << klass->GetDescriptor(&temp) << " from " <<
5535             klass->GetLocation();
5536       }
5537     }
5538   }
5539   if (callback != nullptr) {
5540     callback->MakeVisible(self);
5541   }
5542   return success;
5543 }
5544 
5545 // We recursively run down the tree of interfaces. We need to do this in the order they are declared
5546 // and perform the initialization only on those interfaces that contain default methods.
5547 bool ClassLinker::InitializeDefaultInterfaceRecursive(Thread* self,
5548                                                       Handle<mirror::Class> iface,
5549                                                       bool can_init_statics,
5550                                                       bool can_init_parents) {
5551   CHECK(iface->IsInterface());
5552   size_t num_direct_ifaces = iface->NumDirectInterfaces();
5553   // Only create the (expensive) handle scope if we need it.
5554   if (UNLIKELY(num_direct_ifaces > 0)) {
5555     StackHandleScope<1> hs(self);
5556     MutableHandle<mirror::Class> handle_super_iface(hs.NewHandle<mirror::Class>(nullptr));
5557     // First we initialize all of iface's super-interfaces recursively.
5558     for (size_t i = 0; i < num_direct_ifaces; i++) {
5559       ObjPtr<mirror::Class> super_iface = mirror::Class::GetDirectInterface(self, iface.Get(), i);
5560       CHECK(super_iface != nullptr) << iface->PrettyDescriptor() << " iface #" << i;
5561       if (!super_iface->HasBeenRecursivelyInitialized()) {
5562         // Recursive step
5563         handle_super_iface.Assign(super_iface);
5564         if (!InitializeDefaultInterfaceRecursive(self,
5565                                                  handle_super_iface,
5566                                                  can_init_statics,
5567                                                  can_init_parents)) {
5568           return false;
5569         }
5570       }
5571     }
5572   }
5573 
5574   bool result = true;
5575   // Then we initialize 'iface' if it has default methods. We do not need to (and in fact must not)
5576   // initialize if we don't have default methods.
5577   if (iface->HasDefaultMethods()) {
5578     result = EnsureInitialized(self, iface, can_init_statics, can_init_parents);
5579   }
5580 
5581   // Mark that this interface has undergone recursive default interface initialization so we know we
5582   // can skip it on any later class initializations. We do this even if we are not a default
5583   // interface since we can still avoid the traversal. This is purely a performance optimization.
5584   if (result) {
5585     // TODO This should be done in a better way
5586     // Note: Use a try-lock to avoid blocking when someone else is holding the lock on this
5587     //       interface. It is bad (Java) style, but not impossible. Marking the recursive
5588     //       initialization is a performance optimization (to avoid another idempotent visit
5589     //       for other implementing classes/interfaces), and can be revisited later.
5590     ObjectTryLock<mirror::Class> lock(self, iface);
5591     if (lock.Acquired()) {
5592       iface->SetRecursivelyInitialized();
5593     }
5594   }
5595   return result;
5596 }
5597 
5598 bool ClassLinker::WaitForInitializeClass(Handle<mirror::Class> klass,
5599                                          Thread* self,
5600                                          ObjectLock<mirror::Class>& lock)
5601     REQUIRES_SHARED(Locks::mutator_lock_) {
5602   while (true) {
5603     self->AssertNoPendingException();
5604     CHECK(!klass->IsInitialized());
5605     lock.WaitIgnoringInterrupts();
5606 
5607     // When we wake up, repeat the test for init-in-progress.  If
5608     // there's an exception pending (only possible if
5609     // we were not using WaitIgnoringInterrupts), bail out.
5610     if (self->IsExceptionPending()) {
5611       WrapExceptionInInitializer(klass);
5612       mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
5613       return false;
5614     }
5615     // Spurious wakeup? Go back to waiting.
5616     if (klass->GetStatus() == ClassStatus::kInitializing) {
5617       continue;
5618     }
5619     if (klass->GetStatus() == ClassStatus::kVerified &&
5620         Runtime::Current()->IsAotCompiler()) {
5621       // Compile time initialization failed.
5622       return false;
5623     }
5624     if (klass->IsErroneous()) {
5625       // The caller wants an exception, but it was thrown in a
5626       // different thread.  Synthesize one here.
5627       ThrowNoClassDefFoundError("<clinit> failed for class %s; see exception in other thread",
5628                                 klass->PrettyDescriptor().c_str());
5629       VlogClassInitializationFailure(klass);
5630       return false;
5631     }
5632     if (klass->IsInitialized()) {
5633       return true;
5634     }
5635     LOG(FATAL) << "Unexpected class status. " << klass->PrettyClass() << " is "
5636         << klass->GetStatus();
5637   }
5638   UNREACHABLE();
5639 }
5640 
5641 static void ThrowSignatureCheckResolveReturnTypeException(Handle<mirror::Class> klass,
5642                                                           Handle<mirror::Class> super_klass,
5643                                                           ArtMethod* method,
5644                                                           ArtMethod* m)
5645     REQUIRES_SHARED(Locks::mutator_lock_) {
5646   DCHECK(Thread::Current()->IsExceptionPending());
5647   DCHECK(!m->IsProxyMethod());
5648   const DexFile* dex_file = m->GetDexFile();
5649   const dex::MethodId& method_id = dex_file->GetMethodId(m->GetDexMethodIndex());
5650   const dex::ProtoId& proto_id = dex_file->GetMethodPrototype(method_id);
5651   dex::TypeIndex return_type_idx = proto_id.return_type_idx_;
5652   std::string return_type = dex_file->PrettyType(return_type_idx);
5653   std::string class_loader = mirror::Object::PrettyTypeOf(m->GetDeclaringClass()->GetClassLoader());
5654   ThrowWrappedLinkageError(klass.Get(),
5655                            "While checking class %s method %s signature against %s %s: "
5656                            "Failed to resolve return type %s with %s",
5657                            mirror::Class::PrettyDescriptor(klass.Get()).c_str(),
5658                            ArtMethod::PrettyMethod(method).c_str(),
5659                            super_klass->IsInterface() ? "interface" : "superclass",
5660                            mirror::Class::PrettyDescriptor(super_klass.Get()).c_str(),
5661                            return_type.c_str(), class_loader.c_str());
5662 }
5663 
5664 static void ThrowSignatureCheckResolveArgException(Handle<mirror::Class> klass,
5665                                                    Handle<mirror::Class> super_klass,
5666                                                    ArtMethod* method,
5667                                                    ArtMethod* m,
5668                                                    uint32_t index,
5669                                                    dex::TypeIndex arg_type_idx)
5670     REQUIRES_SHARED(Locks::mutator_lock_) {
5671   DCHECK(Thread::Current()->IsExceptionPending());
5672   DCHECK(!m->IsProxyMethod());
5673   const DexFile* dex_file = m->GetDexFile();
5674   std::string arg_type = dex_file->PrettyType(arg_type_idx);
5675   std::string class_loader = mirror::Object::PrettyTypeOf(m->GetDeclaringClass()->GetClassLoader());
5676   ThrowWrappedLinkageError(klass.Get(),
5677                            "While checking class %s method %s signature against %s %s: "
5678                            "Failed to resolve arg %u type %s with %s",
5679                            mirror::Class::PrettyDescriptor(klass.Get()).c_str(),
5680                            ArtMethod::PrettyMethod(method).c_str(),
5681                            super_klass->IsInterface() ? "interface" : "superclass",
5682                            mirror::Class::PrettyDescriptor(super_klass.Get()).c_str(),
5683                            index, arg_type.c_str(), class_loader.c_str());
5684 }
5685 
5686 static void ThrowSignatureMismatch(Handle<mirror::Class> klass,
5687                                    Handle<mirror::Class> super_klass,
5688                                    ArtMethod* method,
5689                                    const std::string& error_msg)
5690     REQUIRES_SHARED(Locks::mutator_lock_) {
5691   ThrowLinkageError(klass.Get(),
5692                     "Class %s method %s resolves differently in %s %s: %s",
5693                     mirror::Class::PrettyDescriptor(klass.Get()).c_str(),
5694                     ArtMethod::PrettyMethod(method).c_str(),
5695                     super_klass->IsInterface() ? "interface" : "superclass",
5696                     mirror::Class::PrettyDescriptor(super_klass.Get()).c_str(),
5697                     error_msg.c_str());
5698 }
5699 
5700 static bool HasSameSignatureWithDifferentClassLoaders(Thread* self,
5701                                                       Handle<mirror::Class> klass,
5702                                                       Handle<mirror::Class> super_klass,
5703                                                       ArtMethod* method1,
5704                                                       ArtMethod* method2)
5705     REQUIRES_SHARED(Locks::mutator_lock_) {
5706   {
5707     StackHandleScope<1> hs(self);
5708     Handle<mirror::Class> return_type(hs.NewHandle(method1->ResolveReturnType()));
5709     if (UNLIKELY(return_type == nullptr)) {
5710       ThrowSignatureCheckResolveReturnTypeException(klass, super_klass, method1, method1);
5711       return false;
5712     }
5713     ObjPtr<mirror::Class> other_return_type = method2->ResolveReturnType();
5714     if (UNLIKELY(other_return_type == nullptr)) {
5715       ThrowSignatureCheckResolveReturnTypeException(klass, super_klass, method1, method2);
5716       return false;
5717     }
5718     if (UNLIKELY(other_return_type != return_type.Get())) {
5719       ThrowSignatureMismatch(klass, super_klass, method1,
5720                              StringPrintf("Return types mismatch: %s(%p) vs %s(%p)",
5721                                           return_type->PrettyClassAndClassLoader().c_str(),
5722                                           return_type.Get(),
5723                                           other_return_type->PrettyClassAndClassLoader().c_str(),
5724                                           other_return_type.Ptr()));
5725       return false;
5726     }
5727   }
5728   const dex::TypeList* types1 = method1->GetParameterTypeList();
5729   const dex::TypeList* types2 = method2->GetParameterTypeList();
5730   if (types1 == nullptr) {
5731     if (types2 != nullptr && types2->Size() != 0) {
5732       ThrowSignatureMismatch(klass, super_klass, method1,
5733                              StringPrintf("Type list mismatch with %s",
5734                                           method2->PrettyMethod(true).c_str()));
5735       return false;
5736     }
5737     return true;
5738   } else if (UNLIKELY(types2 == nullptr)) {
5739     if (types1->Size() != 0) {
5740       ThrowSignatureMismatch(klass, super_klass, method1,
5741                              StringPrintf("Type list mismatch with %s",
5742                                           method2->PrettyMethod(true).c_str()));
5743       return false;
5744     }
5745     return true;
5746   }
5747   uint32_t num_types = types1->Size();
5748   if (UNLIKELY(num_types != types2->Size())) {
5749     ThrowSignatureMismatch(klass, super_klass, method1,
5750                            StringPrintf("Type list mismatch with %s",
5751                                         method2->PrettyMethod(true).c_str()));
5752     return false;
5753   }
5754   for (uint32_t i = 0; i < num_types; ++i) {
5755     StackHandleScope<1> hs(self);
5756     dex::TypeIndex param_type_idx = types1->GetTypeItem(i).type_idx_;
5757     Handle<mirror::Class> param_type(hs.NewHandle(
5758         method1->ResolveClassFromTypeIndex(param_type_idx)));
5759     if (UNLIKELY(param_type == nullptr)) {
5760       ThrowSignatureCheckResolveArgException(klass, super_klass, method1,
5761                                              method1, i, param_type_idx);
5762       return false;
5763     }
5764     dex::TypeIndex other_param_type_idx = types2->GetTypeItem(i).type_idx_;
5765     ObjPtr<mirror::Class> other_param_type =
5766         method2->ResolveClassFromTypeIndex(other_param_type_idx);
5767     if (UNLIKELY(other_param_type == nullptr)) {
5768       ThrowSignatureCheckResolveArgException(klass, super_klass, method1,
5769                                              method2, i, other_param_type_idx);
5770       return false;
5771     }
5772     if (UNLIKELY(param_type.Get() != other_param_type)) {
5773       ThrowSignatureMismatch(klass, super_klass, method1,
5774                              StringPrintf("Parameter %u type mismatch: %s(%p) vs %s(%p)",
5775                                           i,
5776                                           param_type->PrettyClassAndClassLoader().c_str(),
5777                                           param_type.Get(),
5778                                           other_param_type->PrettyClassAndClassLoader().c_str(),
5779                                           other_param_type.Ptr()));
5780       return false;
5781     }
5782   }
5783   return true;
5784 }
5785 
5786 
5787 bool ClassLinker::ValidateSuperClassDescriptors(Handle<mirror::Class> klass) {
5788   if (klass->IsInterface()) {
5789     return true;
5790   }
5791   // Begin with the methods local to the superclass.
5792   Thread* self = Thread::Current();
5793   StackHandleScope<1> hs(self);
5794   MutableHandle<mirror::Class> super_klass(hs.NewHandle<mirror::Class>(nullptr));
5795   if (klass->HasSuperClass() &&
5796       klass->GetClassLoader() != klass->GetSuperClass()->GetClassLoader()) {
5797     super_klass.Assign(klass->GetSuperClass());
5798     for (int i = klass->GetSuperClass()->GetVTableLength() - 1; i >= 0; --i) {
5799       auto* m = klass->GetVTableEntry(i, image_pointer_size_);
5800       auto* super_m = klass->GetSuperClass()->GetVTableEntry(i, image_pointer_size_);
5801       if (m != super_m) {
5802         if (UNLIKELY(!HasSameSignatureWithDifferentClassLoaders(self,
5803                                                                 klass,
5804                                                                 super_klass,
5805                                                                 m,
5806                                                                 super_m))) {
5807           self->AssertPendingException();
5808           return false;
5809         }
5810       }
5811     }
5812   }
5813   for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
5814     super_klass.Assign(klass->GetIfTable()->GetInterface(i));
5815     if (klass->GetClassLoader() != super_klass->GetClassLoader()) {
5816       uint32_t num_methods = super_klass->NumVirtualMethods();
5817       for (uint32_t j = 0; j < num_methods; ++j) {
5818         auto* m = klass->GetIfTable()->GetMethodArray(i)->GetElementPtrSize<ArtMethod*>(
5819             j, image_pointer_size_);
5820         auto* super_m = super_klass->GetVirtualMethod(j, image_pointer_size_);
5821         if (m != super_m) {
5822           if (UNLIKELY(!HasSameSignatureWithDifferentClassLoaders(self,
5823                                                                   klass,
5824                                                                   super_klass,
5825                                                                   m,
5826                                                                   super_m))) {
5827             self->AssertPendingException();
5828             return false;
5829           }
5830         }
5831       }
5832     }
5833   }
5834   return true;
5835 }
5836 
5837 bool ClassLinker::EnsureInitialized(Thread* self,
5838                                     Handle<mirror::Class> c,
5839                                     bool can_init_fields,
5840                                     bool can_init_parents) {
5841   DCHECK(c != nullptr);
5842 
5843   if (c->IsInitialized()) {
5844     // If we've seen an initialized but not visibly initialized class
5845     // many times, request visible initialization.
5846     if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
5847       // Thanks to the x86 memory model, classes skip the initialized status.
5848       DCHECK(c->IsVisiblyInitialized());
5849     } else if (UNLIKELY(!c->IsVisiblyInitialized())) {
5850       if (self->IncrementMakeVisiblyInitializedCounter()) {
5851         MakeInitializedClassesVisiblyInitialized(self, /*wait=*/ false);
5852       }
5853     }
5854     DCHECK(c->WasVerificationAttempted()) << c->PrettyClassAndClassLoader();
5855     return true;
5856   }
5857   // SubtypeCheckInfo::Initialized must happen-before any new-instance for that type.
5858   //
5859   // Ensure the bitstring is initialized before any of the class initialization
5860   // logic occurs. Once a class initializer starts running, objects can
5861   // escape into the heap and use the subtype checking code.
5862   //
5863   // Note: A class whose SubtypeCheckInfo is at least Initialized means it
5864   // can be used as a source for the IsSubClass check, and that all ancestors
5865   // of the class are Assigned (can be used as a target for IsSubClass check)
5866   // or Overflowed (can be used as a source for IsSubClass check).
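  // Illustrative restatement (hypothetical class name Foo, not part of the original comment): once
  // Foo.<clinit> starts running, a Foo instance may escape to another thread and be used as the
  // source of an IsSubClass query, so Foo's SubtypeCheckInfo must already be at least Initialized.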
5867   if (kBitstringSubtypeCheckEnabled) {
5868     MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
5869     SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(c.Get());
5870     // TODO: Avoid taking subtype_check_lock_ if SubtypeCheck is already initialized.
5871   }
5872   const bool success = InitializeClass(self, c, can_init_fields, can_init_parents);
5873   if (!success) {
5874     if (can_init_fields && can_init_parents) {
5875       CHECK(self->IsExceptionPending()) << c->PrettyClass();
5876     } else {
5877       // There may or may not be an exception pending. If there is, clear it.
5878       // We propagate the exception only if we can initialize fields and parents.
5879       self->ClearException();
5880     }
5881   } else {
5882     self->AssertNoPendingException();
5883   }
5884   return success;
5885 }
5886 
5887 void ClassLinker::FixupTemporaryDeclaringClass(ObjPtr<mirror::Class> temp_class,
5888                                                ObjPtr<mirror::Class> new_class) {
5889   DCHECK_EQ(temp_class->NumInstanceFields(), 0u);
5890   for (ArtField& field : new_class->GetIFields()) {
5891     if (field.GetDeclaringClass() == temp_class) {
5892       field.SetDeclaringClass(new_class);
5893     }
5894   }
5895 
5896   DCHECK_EQ(temp_class->NumStaticFields(), 0u);
5897   for (ArtField& field : new_class->GetSFields()) {
5898     if (field.GetDeclaringClass() == temp_class) {
5899       field.SetDeclaringClass(new_class);
5900     }
5901   }
5902 
5903   DCHECK_EQ(temp_class->NumDirectMethods(), 0u);
5904   DCHECK_EQ(temp_class->NumVirtualMethods(), 0u);
5905   for (auto& method : new_class->GetMethods(image_pointer_size_)) {
5906     if (method.GetDeclaringClass() == temp_class) {
5907       method.SetDeclaringClass(new_class);
5908     }
5909   }
5910 
5911   // Make sure the remembered set and mod-union tables know that we updated some of the native
5912   // roots.
5913   WriteBarrier::ForEveryFieldWrite(new_class);
5914 }
5915 
5916 void ClassLinker::RegisterClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
5917   CHECK(class_loader->GetAllocator() == nullptr);
5918   CHECK(class_loader->GetClassTable() == nullptr);
5919   Thread* const self = Thread::Current();
5920   ClassLoaderData data;
5921   data.weak_root = self->GetJniEnv()->GetVm()->AddWeakGlobalRef(self, class_loader);
5922   // Create and set the class table.
5923   data.class_table = new ClassTable;
5924   class_loader->SetClassTable(data.class_table);
5925   // Create and set the linear allocator.
5926   data.allocator = Runtime::Current()->CreateLinearAlloc();
5927   class_loader->SetAllocator(data.allocator);
5928   // Add to the list so that we know to free the data later.
5929   class_loaders_.push_back(data);
5930 }
5931 
5932 ClassTable* ClassLinker::InsertClassTableForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
5933   if (class_loader == nullptr) {
5934     return boot_class_table_.get();
5935   }
5936   ClassTable* class_table = class_loader->GetClassTable();
5937   if (class_table == nullptr) {
5938     RegisterClassLoader(class_loader);
5939     class_table = class_loader->GetClassTable();
5940     DCHECK(class_table != nullptr);
5941   }
5942   return class_table;
5943 }
5944 
5945 ClassTable* ClassLinker::ClassTableForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
5946   return class_loader == nullptr ? boot_class_table_.get() : class_loader->GetClassTable();
5947 }
5948 
5949 static ImTable* FindSuperImt(ObjPtr<mirror::Class> klass, PointerSize pointer_size)
5950     REQUIRES_SHARED(Locks::mutator_lock_) {
5951   while (klass->HasSuperClass()) {
5952     klass = klass->GetSuperClass();
5953     if (klass->ShouldHaveImt()) {
5954       return klass->GetImt(pointer_size);
5955     }
5956   }
5957   return nullptr;
5958 }
5959 
5960 bool ClassLinker::LinkClass(Thread* self,
5961                             const char* descriptor,
5962                             Handle<mirror::Class> klass,
5963                             Handle<mirror::ObjectArray<mirror::Class>> interfaces,
5964                             MutableHandle<mirror::Class>* h_new_class_out) {
5965   CHECK_EQ(ClassStatus::kLoaded, klass->GetStatus());
5966 
5967   if (!LinkSuperClass(klass)) {
5968     return false;
5969   }
5970   ArtMethod* imt_data[ImTable::kSize];
5971   // If there are any new conflicts compared to super class.
5972   bool new_conflict = false;
5973   std::fill_n(imt_data, arraysize(imt_data), Runtime::Current()->GetImtUnimplementedMethod());
5974   if (!LinkMethods(self, klass, interfaces, &new_conflict, imt_data)) {
5975     return false;
5976   }
5977   if (!LinkInstanceFields(self, klass)) {
5978     return false;
5979   }
5980   size_t class_size;
5981   if (!LinkStaticFields(self, klass, &class_size)) {
5982     return false;
5983   }
5984   CreateReferenceInstanceOffsets(klass);
5985   CHECK_EQ(ClassStatus::kLoaded, klass->GetStatus());
5986 
5987   ImTable* imt = nullptr;
5988   if (klass->ShouldHaveImt()) {
5989     // If there are any new conflicts compared to the super class we can not make a copy. There
5990     // can be cases where both will have a conflict method at the same slot without having the same
5991     // set of conflicts. In this case, we can not share the IMT since the conflict table slow path
5992     // will possibly create a table that is incorrect for either of the classes.
5993     // Same IMT with new_conflict does not happen very often.
5994     if (!new_conflict) {
5995       ImTable* super_imt = FindSuperImt(klass.Get(), image_pointer_size_);
5996       if (super_imt != nullptr) {
5997         bool imt_equals = true;
5998         for (size_t i = 0; i < ImTable::kSize && imt_equals; ++i) {
5999           imt_equals = imt_equals && (super_imt->Get(i, image_pointer_size_) == imt_data[i]);
6000         }
6001         if (imt_equals) {
6002           imt = super_imt;
6003         }
6004       }
6005     }
6006     if (imt == nullptr) {
6007       LinearAlloc* allocator = GetAllocatorForClassLoader(klass->GetClassLoader());
6008       imt = reinterpret_cast<ImTable*>(
6009           allocator->Alloc(self, ImTable::SizeInBytes(image_pointer_size_)));
6010       if (imt == nullptr) {
6011         return false;
6012       }
6013       imt->Populate(imt_data, image_pointer_size_);
6014     }
6015   }
6016 
6017   if (!klass->IsTemp() || (!init_done_ && klass->GetClassSize() == class_size)) {
6018     // We don't need to retire this class as it has no embedded tables or it was created at the
6019     // correct size during class linker initialization.
6020     CHECK_EQ(klass->GetClassSize(), class_size) << klass->PrettyDescriptor();
6021 
6022     if (klass->ShouldHaveEmbeddedVTable()) {
6023       klass->PopulateEmbeddedVTable(image_pointer_size_);
6024     }
6025     if (klass->ShouldHaveImt()) {
6026       klass->SetImt(imt, image_pointer_size_);
6027     }
6028 
6029     // Update CHA info based on whether we override methods.
6030     // Have to do this before setting the class as resolved which allows
6031     // instantiation of klass.
6032     if (LIKELY(descriptor != nullptr) && cha_ != nullptr) {
6033       cha_->UpdateAfterLoadingOf(klass);
6034     }
6035 
6036     // This will notify waiters on klass that saw the not yet resolved
6037     // class in the class_table_ during EnsureResolved.
6038     mirror::Class::SetStatus(klass, ClassStatus::kResolved, self);
6039     h_new_class_out->Assign(klass.Get());
6040   } else {
6041     CHECK(!klass->IsResolved());
6042     // Retire the temporary class and create the correctly sized resolved class.
6043     StackHandleScope<1> hs(self);
6044     Handle<mirror::Class> h_new_class =
6045         hs.NewHandle(mirror::Class::CopyOf(klass, self, class_size, imt, image_pointer_size_));
6046     // Set arrays to null since we don't want to have multiple classes with the same ArtField or
6047     // ArtMethod array pointers. If this occurs, it causes bugs in remembered sets since the GC
6048     // may not see any references to the target space and clean the card for a class if another
6049     // class had the same array pointer.
6050     klass->SetMethodsPtrUnchecked(nullptr, 0, 0);
6051     klass->SetSFieldsPtrUnchecked(nullptr);
6052     klass->SetIFieldsPtrUnchecked(nullptr);
6053     if (UNLIKELY(h_new_class == nullptr)) {
6054       self->AssertPendingOOMException();
6055       mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
6056       return false;
6057     }
6058 
6059     CHECK_EQ(h_new_class->GetClassSize(), class_size);
6060     ObjectLock<mirror::Class> lock(self, h_new_class);
6061     FixupTemporaryDeclaringClass(klass.Get(), h_new_class.Get());
6062 
6063     if (LIKELY(descriptor != nullptr)) {
6064       WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
6065       const ObjPtr<mirror::ClassLoader> class_loader = h_new_class.Get()->GetClassLoader();
6066       ClassTable* const table = InsertClassTableForClassLoader(class_loader);
6067       const ObjPtr<mirror::Class> existing =
6068           table->UpdateClass(descriptor, h_new_class.Get(), ComputeModifiedUtf8Hash(descriptor));
6069       if (class_loader != nullptr) {
6070         // We updated the class in the class table, perform the write barrier so that the GC knows
6071         // about the change.
6072         WriteBarrier::ForEveryFieldWrite(class_loader);
6073       }
6074       CHECK_EQ(existing, klass.Get());
6075       if (log_new_roots_) {
6076         new_class_roots_.push_back(GcRoot<mirror::Class>(h_new_class.Get()));
6077       }
6078     }
6079 
6080     // Update CHA info based on whether we override methods.
6081     // Have to do this before setting the class as resolved which allows
6082     // instantiation of klass.
6083     if (LIKELY(descriptor != nullptr) && cha_ != nullptr) {
6084       cha_->UpdateAfterLoadingOf(h_new_class);
6085     }
6086 
6087     // This will notify waiters on temp class that saw the not yet resolved class in the
6088     // class_table_ during EnsureResolved.
6089     mirror::Class::SetStatus(klass, ClassStatus::kRetired, self);
6090 
6091     CHECK_EQ(h_new_class->GetStatus(), ClassStatus::kResolving);
6092     // This will notify waiters on new_class that saw the not yet resolved
6093     // class in the class_table_ during EnsureResolved.
6094     mirror::Class::SetStatus(h_new_class, ClassStatus::kResolved, self);
6095     // Return the new class.
6096     h_new_class_out->Assign(h_new_class.Get());
6097   }
6098   return true;
6099 }
6100 
6101 bool ClassLinker::LoadSuperAndInterfaces(Handle<mirror::Class> klass, const DexFile& dex_file) {
6102   CHECK_EQ(ClassStatus::kIdx, klass->GetStatus());
6103   const dex::ClassDef& class_def = dex_file.GetClassDef(klass->GetDexClassDefIndex());
6104   dex::TypeIndex super_class_idx = class_def.superclass_idx_;
6105   if (super_class_idx.IsValid()) {
6106     // Check that a class does not inherit from itself directly.
6107     //
6108     // TODO: This is a cheap check to detect the straightforward case
6109     // of a class extending itself (b/28685551), but we should do a
6110     // proper cycle detection on loaded classes, to detect all cases
6111     // of class circularity errors (b/28830038).
6112     if (super_class_idx == class_def.class_idx_) {
6113       ThrowClassCircularityError(klass.Get(),
6114                                  "Class %s extends itself",
6115                                  klass->PrettyDescriptor().c_str());
6116       return false;
6117     }
6118 
6119     ObjPtr<mirror::Class> super_class = ResolveType(super_class_idx, klass.Get());
6120     if (super_class == nullptr) {
6121       DCHECK(Thread::Current()->IsExceptionPending());
6122       return false;
6123     }
6124     // Verify
6125     if (!klass->CanAccess(super_class)) {
6126       ThrowIllegalAccessError(klass.Get(), "Class %s extended by class %s is inaccessible",
6127                               super_class->PrettyDescriptor().c_str(),
6128                               klass->PrettyDescriptor().c_str());
6129       return false;
6130     }
6131     CHECK(super_class->IsResolved());
6132     klass->SetSuperClass(super_class);
6133   }
6134   const dex::TypeList* interfaces = dex_file.GetInterfacesList(class_def);
6135   if (interfaces != nullptr) {
6136     for (size_t i = 0; i < interfaces->Size(); i++) {
6137       dex::TypeIndex idx = interfaces->GetTypeItem(i).type_idx_;
6138       ObjPtr<mirror::Class> interface = ResolveType(idx, klass.Get());
6139       if (interface == nullptr) {
6140         DCHECK(Thread::Current()->IsExceptionPending());
6141         return false;
6142       }
6143       // Verify
6144       if (!klass->CanAccess(interface)) {
6145         // TODO: the RI seemed to ignore this in my testing.
6146         ThrowIllegalAccessError(klass.Get(),
6147                                 "Interface %s implemented by class %s is inaccessible",
6148                                 interface->PrettyDescriptor().c_str(),
6149                                 klass->PrettyDescriptor().c_str());
6150         return false;
6151       }
6152     }
6153   }
6154   // Mark the class as loaded.
6155   mirror::Class::SetStatus(klass, ClassStatus::kLoaded, nullptr);
6156   return true;
6157 }
6158 
6159 bool ClassLinker::LinkSuperClass(Handle<mirror::Class> klass) {
6160   CHECK(!klass->IsPrimitive());
6161   ObjPtr<mirror::Class> super = klass->GetSuperClass();
6162   ObjPtr<mirror::Class> object_class = GetClassRoot<mirror::Object>(this);
6163   if (klass.Get() == object_class) {
6164     if (super != nullptr) {
6165       ThrowClassFormatError(klass.Get(), "java.lang.Object must not have a superclass");
6166       return false;
6167     }
6168     return true;
6169   }
6170   if (super == nullptr) {
6171     ThrowLinkageError(klass.Get(), "No superclass defined for class %s",
6172                       klass->PrettyDescriptor().c_str());
6173     return false;
6174   }
6175   // Verify
6176   if (klass->IsInterface() && super != object_class) {
6177     ThrowClassFormatError(klass.Get(), "Interfaces must have java.lang.Object as superclass");
6178     return false;
6179   }
6180   if (super->IsFinal()) {
6181     ThrowVerifyError(klass.Get(),
6182                      "Superclass %s of %s is declared final",
6183                      super->PrettyDescriptor().c_str(),
6184                      klass->PrettyDescriptor().c_str());
6185     return false;
6186   }
6187   if (super->IsInterface()) {
6188     ThrowIncompatibleClassChangeError(klass.Get(),
6189                                       "Superclass %s of %s is an interface",
6190                                       super->PrettyDescriptor().c_str(),
6191                                       klass->PrettyDescriptor().c_str());
6192     return false;
6193   }
6194   if (!klass->CanAccess(super)) {
6195     ThrowIllegalAccessError(klass.Get(), "Superclass %s is inaccessible to class %s",
6196                             super->PrettyDescriptor().c_str(),
6197                             klass->PrettyDescriptor().c_str());
6198     return false;
6199   }
6200 
6201   // Inherit kAccClassIsFinalizable from the superclass in case this
6202   // class doesn't override finalize.
6203   if (super->IsFinalizable()) {
6204     klass->SetFinalizable();
6205   }
6206 
6207   // Inherit the class loader flag from the super class.
6208   if (super->IsClassLoaderClass()) {
6209     klass->SetClassLoaderClass();
6210   }
6211 
6212   // Inherit reference flags (if any) from the superclass.
6213   uint32_t reference_flags = (super->GetClassFlags() & mirror::kClassFlagReference);
6214   if (reference_flags != 0) {
6215     CHECK_EQ(klass->GetClassFlags(), 0u);
6216     klass->SetClassFlags(klass->GetClassFlags() | reference_flags);
6217   }
6218   // Disallow custom direct subclasses of java.lang.ref.Reference.
6219   if (init_done_ && super == GetClassRoot<mirror::Reference>(this)) {
6220     ThrowLinkageError(klass.Get(),
6221                       "Class %s attempts to subclass java.lang.ref.Reference, which is not allowed",
6222                       klass->PrettyDescriptor().c_str());
6223     return false;
6224   }
6225 
6226   if (kIsDebugBuild) {
6227     // Ensure super classes are fully resolved prior to resolving fields.
6228     while (super != nullptr) {
6229       CHECK(super->IsResolved());
6230       super = super->GetSuperClass();
6231     }
6232   }
6233   return true;
6234 }
6235 
6236 // A wrapper class representing the result of a method translation used for linking methods and
6237 // updating superclass default methods. For each method in a class's vtable there are 4 states it
6238 // could be in:
6239 // 1) No translation is necessary. In this case there is no MethodTranslation object for it. This
6240 //    is the standard case and is true when the method is not overridable by a default method,
6241 //    the class defines a concrete implementation of the method, the default method implementation
6242 //    remains the same, or an abstract method stayed abstract.
6243 // 2) The method must be translated to a different default method. We note this with
6244 //    CreateTranslatedMethod.
6245 // 3) The method must be replaced with a conflict method. This happens when a superclass
6246 //    implements an interface with a default method and this class implements an unrelated
6247 //    interface that also defines that default method. We note this with CreateConflictingMethod.
6248 // 4) The method must be replaced with an abstract miranda method. This happens when a superclass
6249 //    implements an interface with a default method and this class implements a subinterface of
6250 //    the superclass's interface which declares the default method abstract. We note this with
6251 //    CreateAbstractMethod.
6252 //
6253 // When a method translation is unnecessary (case #1), we don't put it into the
6254 // default_translations map. So an instance of MethodTranslation must be in one of #2-#4.
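// Hypothetical Java-source illustrations of cases #2-#4 (these examples are not part of the
// original sources):
//   interface A  { default void f() { ... } }
//   interface B  { default void f() { ... } }            // unrelated to A
//   interface A2 extends A { void f(); }                  // re-abstracts f()
//   interface A3 extends A { default void f() { ... } }   // more specific default
//   class Super implements A { }
//   class C2 extends Super implements A3 { }  // case #2: f() is translated to A3.f()
//   class C3 extends Super implements B  { }  // case #3: f() becomes a conflict method
//   class C4 extends Super implements A2 { }  // case #4: f() becomes abstract (miranda)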
6255 class ClassLinker::MethodTranslation {
6256  public:
6257   MethodTranslation() : translation_(nullptr), type_(Type::kInvalid) {}
6258 
6259   // This slot must become a default conflict method.
6260   static MethodTranslation CreateConflictingMethod() {
6261     return MethodTranslation(Type::kConflict, /*translation=*/nullptr);
6262   }
6263 
6264   // This slot must become an abstract method.
6265   static MethodTranslation CreateAbstractMethod() {
6266     return MethodTranslation(Type::kAbstract, /*translation=*/nullptr);
6267   }
6268 
6269   // Use the given method as the current value for this vtable slot during translation.
6270   static MethodTranslation CreateTranslatedMethod(ArtMethod* new_method) {
6271     return MethodTranslation(Type::kTranslation, new_method);
6272   }
6273 
6274   // Returns true if this is a method that must become a conflict method.
6275   bool IsInConflict() const {
6276     return type_ == Type::kConflict;
6277   }
6278 
6279   // Returns true if this is a method that must become an abstract method.
6280   bool IsAbstract() const {
6281     return type_ == Type::kAbstract;
6282   }
6283 
6284   // Returns true if this is a method that must become a different method.
6285   bool IsTranslation() const {
6286     return type_ == Type::kTranslation;
6287   }
6288 
6289   // Get the translated version of this method.
6290   ArtMethod* GetTranslation() const {
6291     DCHECK(IsTranslation());
6292     DCHECK(translation_ != nullptr);
6293     return translation_;
6294   }
6295 
6296  private:
6297   enum class Type {
6298     kInvalid,
6299     kTranslation,
6300     kConflict,
6301     kAbstract,
6302   };
6303 
6304   MethodTranslation(Type type, ArtMethod* translation)
6305       : translation_(translation), type_(type) {}
6306 
6307   ArtMethod* translation_;
6308   Type type_;
6309 };
6310 
6311 // Populate the class vtable and itable. Compute return type indices.
6312 bool ClassLinker::LinkMethods(Thread* self,
6313                               Handle<mirror::Class> klass,
6314                               Handle<mirror::ObjectArray<mirror::Class>> interfaces,
6315                               bool* out_new_conflict,
6316                               ArtMethod** out_imt) {
6317   self->AllowThreadSuspension();
6318   // A map from vtable indexes to the method they need to be updated to point to. Used because we
6319   // need to have default methods be in the virtuals array of each class but we don't set that up
6320   // until LinkInterfaceMethods.
6321   constexpr size_t kBufferSize = 8;  // Avoid malloc/free for a few translations.
6322   std::pair<size_t, ClassLinker::MethodTranslation> buffer[kBufferSize];
6323   HashMap<size_t, ClassLinker::MethodTranslation> default_translations(buffer, kBufferSize);
6324   // Link virtual methods then interface methods.
6325   // We set up the interface lookup table first because we need it to determine if we need to update
6326   // any vtable entries with new default method implementations.
6327   return SetupInterfaceLookupTable(self, klass, interfaces)
6328           && LinkVirtualMethods(self, klass, /*out*/ &default_translations)
6329           && LinkInterfaceMethods(self, klass, default_translations, out_new_conflict, out_imt);
6330 }
6331 
6332 // Comparator for the name and signature of a method, used in finding overriding methods. The
6333 // implementation avoids the use of handles; if it didn't, then rather than compare dex files we
6334 // could compare dex caches in the implementation below.
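// Typical use within this file (an illustrative sketch, not documentation of a public API):
//   MethodNameAndSignatureComparator cmp(super_method->GetInterfaceMethodIfProxy(pointer_size));
//   bool overrides = cmp.HasSameNameAndSignature(method->GetInterfaceMethodIfProxy(pointer_size));
// where `pointer_size` stands in for the class linker's image_pointer_size_.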
6335 class MethodNameAndSignatureComparator final : public ValueObject {
6336  public:
6337   explicit MethodNameAndSignatureComparator(ArtMethod* method)
6338       REQUIRES_SHARED(Locks::mutator_lock_) :
6339       dex_file_(method->GetDexFile()), mid_(&dex_file_->GetMethodId(method->GetDexMethodIndex())),
6340       name_(nullptr), name_len_(0) {
6341     DCHECK(!method->IsProxyMethod()) << method->PrettyMethod();
6342   }
6343 
6344   const char* GetName() {
6345     if (name_ == nullptr) {
6346       name_ = dex_file_->StringDataAndUtf16LengthByIdx(mid_->name_idx_, &name_len_);
6347     }
6348     return name_;
6349   }
6350 
6351   bool HasSameNameAndSignature(ArtMethod* other)
6352       REQUIRES_SHARED(Locks::mutator_lock_) {
6353     DCHECK(!other->IsProxyMethod()) << other->PrettyMethod();
6354     const DexFile* other_dex_file = other->GetDexFile();
6355     const dex::MethodId& other_mid = other_dex_file->GetMethodId(other->GetDexMethodIndex());
6356     if (dex_file_ == other_dex_file) {
6357       return mid_->name_idx_ == other_mid.name_idx_ && mid_->proto_idx_ == other_mid.proto_idx_;
6358     }
6359     GetName();  // Only used to make sure it is calculated.
6360     uint32_t other_name_len;
6361     const char* other_name = other_dex_file->StringDataAndUtf16LengthByIdx(other_mid.name_idx_,
6362                                                                            &other_name_len);
6363     if (name_len_ != other_name_len || strcmp(name_, other_name) != 0) {
6364       return false;
6365     }
6366     return dex_file_->GetMethodSignature(*mid_) == other_dex_file->GetMethodSignature(other_mid);
6367   }
6368 
6369  private:
6370   // Dex file for the method to compare against.
6371   const DexFile* const dex_file_;
6372   // MethodId for the method to compare against.
6373   const dex::MethodId* const mid_;
6374   // Lazily computed name from the dex file's strings.
6375   const char* name_;
6376   // Lazily computed name length.
6377   uint32_t name_len_;
6378 };
6379 
6380 class LinkVirtualHashTable {
6381  public:
6382   LinkVirtualHashTable(Handle<mirror::Class> klass,
6383                        size_t hash_size,
6384                        uint32_t* hash_table,
6385                        PointerSize image_pointer_size)
6386      : klass_(klass),
6387        hash_size_(hash_size),
6388        hash_table_(hash_table),
6389        image_pointer_size_(image_pointer_size) {
6390     std::fill(hash_table_, hash_table_ + hash_size_, invalid_index_);
6391   }
6392 
6393   void Add(uint32_t virtual_method_index) REQUIRES_SHARED(Locks::mutator_lock_) {
6394     ArtMethod* local_method = klass_->GetVirtualMethodDuringLinking(
6395         virtual_method_index, image_pointer_size_);
6396     const char* name = local_method->GetInterfaceMethodIfProxy(image_pointer_size_)->GetName();
6397     uint32_t hash = ComputeModifiedUtf8Hash(name);
6398     uint32_t index = hash % hash_size_;
6399     // Linear probe until we have an empty slot.
6400     while (hash_table_[index] != invalid_index_) {
6401       if (++index == hash_size_) {
6402         index = 0;
6403       }
6404     }
6405     hash_table_[index] = virtual_method_index;
6406   }
6407 
6408   uint32_t FindAndRemove(MethodNameAndSignatureComparator* comparator, uint32_t hash)
6409       REQUIRES_SHARED(Locks::mutator_lock_) {
6410     DCHECK_EQ(hash, ComputeModifiedUtf8Hash(comparator->GetName()));
6411     size_t index = hash % hash_size_;
6412     while (true) {
6413       const uint32_t value = hash_table_[index];
6414       // Since linear probing creates contiguous blocks, hitting an invalid index means we are
6415       // done with the block and can safely assume the method was not found.
6416       if (value == invalid_index_) {
6417         break;
6418       }
6419       if (value != removed_index_) {  // This signifies it was not already overridden.
6420         ArtMethod* virtual_method =
6421             klass_->GetVirtualMethodDuringLinking(value, image_pointer_size_);
6422         if (comparator->HasSameNameAndSignature(
6423             virtual_method->GetInterfaceMethodIfProxy(image_pointer_size_))) {
6424           hash_table_[index] = removed_index_;
6425           return value;
6426         }
6427       }
6428       if (++index == hash_size_) {
6429         index = 0;
6430       }
6431     }
6432     return GetNotFoundIndex();
6433   }
6434 
6435   static uint32_t GetNotFoundIndex() {
6436     return invalid_index_;
6437   }
6438 
6439  private:
6440   static const uint32_t invalid_index_;
6441   static const uint32_t removed_index_;
6442 
6443   Handle<mirror::Class> klass_;
6444   const size_t hash_size_;
6445   uint32_t* const hash_table_;
6446   const PointerSize image_pointer_size_;
6447 };
6448 
6449 const uint32_t LinkVirtualHashTable::invalid_index_ = std::numeric_limits<uint32_t>::max();
6450 const uint32_t LinkVirtualHashTable::removed_index_ = std::numeric_limits<uint32_t>::max() - 1;
6451 
6452 bool ClassLinker::LinkVirtualMethods(
6453     Thread* self,
6454     Handle<mirror::Class> klass,
6455     /*out*/HashMap<size_t, ClassLinker::MethodTranslation>* default_translations) {
6456   const size_t num_virtual_methods = klass->NumVirtualMethods();
6457   if (klass->IsInterface()) {
6458     // No vtable.
6459     if (!IsUint<16>(num_virtual_methods)) {
6460       ThrowClassFormatError(klass.Get(), "Too many methods on interface: %zu", num_virtual_methods);
6461       return false;
6462     }
6463     bool has_defaults = false;
6464     // Assign each method an IMT index and set the default flag.
6465     for (size_t i = 0; i < num_virtual_methods; ++i) {
6466       ArtMethod* m = klass->GetVirtualMethodDuringLinking(i, image_pointer_size_);
6467       m->SetMethodIndex(i);
6468       if (!m->IsAbstract()) {
6469         // If the dex file does not support default methods, throw ClassFormatError.
6470         // This check is necessary to protect from odd cases, such as native default
6471         // methods, that the dex file verifier permits for old dex file versions. b/157170505
6472         // FIXME: This should be `if (!m->GetDexFile()->SupportsDefaultMethods())` but we're
6473         // currently running CTS tests for default methods with dex file version 035 which
6474         // does not support default methods. So, we limit this to native methods. b/157718952
6475         if (m->IsNative()) {
6476           DCHECK(!m->GetDexFile()->SupportsDefaultMethods());
6477           ThrowClassFormatError(klass.Get(),
6478                                 "Dex file does not support default method '%s'",
6479                                 m->PrettyMethod().c_str());
6480           return false;
6481         }
6482         m->SetAccessFlags(m->GetAccessFlags() | kAccDefault);
6483         has_defaults = true;
6484       }
6485     }
6486     // Mark that we have default methods so that we won't need to scan the virtual_methods_ array
6487     // during initialization. This is a performance optimization. We could simply traverse the
6488     // virtual_methods_ array again during initialization.
6489     if (has_defaults) {
6490       klass->SetHasDefaultMethods();
6491     }
6492     return true;
6493   } else if (klass->HasSuperClass()) {
6494     const size_t super_vtable_length = klass->GetSuperClass()->GetVTableLength();
6495     const size_t max_count = num_virtual_methods + super_vtable_length;
6496     StackHandleScope<3> hs(self);
6497     Handle<mirror::Class> super_class(hs.NewHandle(klass->GetSuperClass()));
6498     MutableHandle<mirror::PointerArray> vtable;
6499     if (super_class->ShouldHaveEmbeddedVTable()) {
6500       vtable = hs.NewHandle(AllocPointerArray(self, max_count));
6501       if (UNLIKELY(vtable == nullptr)) {
6502         self->AssertPendingOOMException();
6503         return false;
6504       }
6505       for (size_t i = 0; i < super_vtable_length; i++) {
6506         vtable->SetElementPtrSize(
6507             i, super_class->GetEmbeddedVTableEntry(i, image_pointer_size_), image_pointer_size_);
6508       }
6509       // We might need to change vtable if we have new virtual methods or new interfaces (since that
6510       // might give us new default methods). If no new interfaces then we can skip the rest since
6511       // the class cannot override any of the super-class's methods. This is required for
6512       // correctness since without it we might not update overridden default method vtable entries
6513       // correctly.
6514       if (num_virtual_methods == 0 && super_class->GetIfTableCount() == klass->GetIfTableCount()) {
6515         klass->SetVTable(vtable.Get());
6516         return true;
6517       }
6518     } else {
6519       DCHECK(super_class->IsAbstract() && !super_class->IsArrayClass());
6520       Handle<mirror::PointerArray> super_vtable = hs.NewHandle(super_class->GetVTable());
6521       CHECK(super_vtable != nullptr) << super_class->PrettyClass();
6522       // We might need to change vtable if we have new virtual methods or new interfaces (since that
6523       // might give us new default methods). See comment above.
6524       if (num_virtual_methods == 0 && super_class->GetIfTableCount() == klass->GetIfTableCount()) {
6525         klass->SetVTable(super_vtable.Get());
6526         return true;
6527       }
6528       vtable = hs.NewHandle(ObjPtr<mirror::PointerArray>::DownCast(
6529           mirror::Array::CopyOf(super_vtable, self, max_count)));
6530       if (UNLIKELY(vtable == nullptr)) {
6531         self->AssertPendingOOMException();
6532         return false;
6533       }
6534     }
6535     // How the algorithm works:
6536     // 1. Populate hash table by adding num_virtual_methods from klass. The values in the hash
6537     // table are: invalid_index for unused slots, index super_vtable_length + i for a virtual
6538     // method which has not been matched to a vtable method, and j if the virtual method at the
6539     // index overrode the super virtual method at index j.
6540     // 2. Loop through the super virtual methods; if one is overridden, update the hash table to j
6541     // (j < super_vtable_length) to avoid redundant checks. (TODO: maybe use this info to reduce
6542     // the need for the initial vtable which we later shrink back down).
6543     // 3. Add non overridden methods to the end of the vtable.
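    // Worked illustration (hypothetical, not part of the original comment): suppose
    // super_vtable_length == 4 and the class declares two virtual methods, toString() and frob().
    // Both are added to the hash table. If super vtable slot 2 is toString(), FindAndRemove
    // matches it, so vtable[2] is set to the local toString() and its hash entry is marked as
    // removed. frob() overrides nothing, so step 3 appends it at vtable index 4 and actual_count
    // becomes 5.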
6544     static constexpr size_t kMaxStackHash = 250;
6545     // + 1 so that even if we only have new default methods we will still be able to use this hash
6546     // table (i.e. it will never have 0 size).
6547     const size_t hash_table_size = num_virtual_methods * 3 + 1;
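    // For example (illustrative numbers): 80 virtual methods give 241 slots, which fits on the
    // stack (<= kMaxStackHash), while 90 virtual methods give 271 slots and are heap-allocated.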
6548     uint32_t* hash_table_ptr;
6549     std::unique_ptr<uint32_t[]> hash_heap_storage;
6550     if (hash_table_size <= kMaxStackHash) {
6551       hash_table_ptr = reinterpret_cast<uint32_t*>(
6552           alloca(hash_table_size * sizeof(*hash_table_ptr)));
6553     } else {
6554       hash_heap_storage.reset(new uint32_t[hash_table_size]);
6555       hash_table_ptr = hash_heap_storage.get();
6556     }
6557     LinkVirtualHashTable hash_table(klass, hash_table_size, hash_table_ptr, image_pointer_size_);
6558     // Add virtual methods to the hash table.
6559     for (size_t i = 0; i < num_virtual_methods; ++i) {
6560       DCHECK(klass->GetVirtualMethodDuringLinking(
6561           i, image_pointer_size_)->GetDeclaringClass() != nullptr);
6562       hash_table.Add(i);
6563     }
6564     // Loop through each super vtable method and see if they are overridden by a method we added to
6565     // the hash table.
6566     for (size_t j = 0; j < super_vtable_length; ++j) {
6567       // Search the hash table to see if we are overridden by any method.
6568       ArtMethod* super_method = vtable->GetElementPtrSize<ArtMethod*>(j, image_pointer_size_);
6569       if (!klass->CanAccessMember(super_method->GetDeclaringClass(),
6570                                   super_method->GetAccessFlags())) {
6571         // Continue on to the next method since this one is package private and cannot be overridden.
6572         // Before Android 4.1, the package-private method super_method might have been incorrectly
6573         // overridden.
6574         continue;
6575       }
6576       MethodNameAndSignatureComparator super_method_name_comparator(
6577           super_method->GetInterfaceMethodIfProxy(image_pointer_size_));
6578       // We remove the method so that subsequent lookups will be faster by making the hash-map
6579       // smaller as we go on.
6580       uint32_t hash = (j < mirror::Object::kVTableLength)
6581           ? object_virtual_method_hashes_[j]
6582           : ComputeModifiedUtf8Hash(super_method_name_comparator.GetName());
6583       uint32_t hash_index = hash_table.FindAndRemove(&super_method_name_comparator, hash);
6584       if (hash_index != hash_table.GetNotFoundIndex()) {
6585         ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(
6586             hash_index, image_pointer_size_);
6587         if (super_method->IsFinal()) {
6588           ThrowLinkageError(klass.Get(), "Method %s overrides final method in class %s",
6589                             virtual_method->PrettyMethod().c_str(),
6590                             super_method->GetDeclaringClassDescriptor());
6591           return false;
6592         }
6593         vtable->SetElementPtrSize(j, virtual_method, image_pointer_size_);
6594         virtual_method->SetMethodIndex(j);
6595       } else if (super_method->IsOverridableByDefaultMethod()) {
6596         // We didn't directly override this method but we might through default methods...
6597         // Check for default method update.
6598         ArtMethod* default_method = nullptr;
6599         switch (FindDefaultMethodImplementation(self,
6600                                                 super_method,
6601                                                 klass,
6602                                                 /*out*/&default_method)) {
6603           case DefaultMethodSearchResult::kDefaultConflict: {
6604             // A conflict was found looking for default methods. Note this (assuming it wasn't
6605             // pre-existing) in the translations map.
6606             if (UNLIKELY(!super_method->IsDefaultConflicting())) {
6607               // Don't generate another conflict method to reduce memory use as an optimization.
6608               default_translations->insert(
6609                   {j, ClassLinker::MethodTranslation::CreateConflictingMethod()});
6610             }
6611             break;
6612           }
6613           case DefaultMethodSearchResult::kAbstractFound: {
6614             // No conflict but method is abstract.
6615             // We note that this vtable entry must be made abstract.
6616             if (UNLIKELY(!super_method->IsAbstract())) {
6617               default_translations->insert(
6618                   {j, ClassLinker::MethodTranslation::CreateAbstractMethod()});
6619             }
6620             break;
6621           }
6622           case DefaultMethodSearchResult::kDefaultFound: {
6623             if (UNLIKELY(super_method->IsDefaultConflicting() ||
6624                         default_method->GetDeclaringClass() != super_method->GetDeclaringClass())) {
6625               // Found a default method implementation that is new.
6626               // TODO: Refactor this to add default methods to virtuals here rather than in
6627               //      LinkInterfaceMethods.
6628               //      The problem is default methods might override previously present
6629               //      default-method or miranda-method vtable entries from the superclass.
6630               //      Unfortunately we need these to be entries in this class's virtuals. We do not
6631               //      give these entries there until LinkInterfaceMethods so we pass this map around
6632               //      to let it know which vtable entries need to be updated.
6633               // Make a note that vtable entry j must be updated, store what it needs to be updated
6634               // to. We will allocate a virtual method slot in LinkInterfaceMethods and fix it up
6635               // then.
6636               default_translations->insert(
6637                   {j, ClassLinker::MethodTranslation::CreateTranslatedMethod(default_method)});
6638               VLOG(class_linker) << "Method " << super_method->PrettyMethod()
6639                                  << " overridden by default "
6640                                  << default_method->PrettyMethod()
6641                                  << " in " << mirror::Class::PrettyClass(klass.Get());
6642             }
6643             break;
6644           }
6645         }
6646       }
6647     }
6648     size_t actual_count = super_vtable_length;
6649     // Add the non-overridden methods at the end.
6650     for (size_t i = 0; i < num_virtual_methods; ++i) {
6651       ArtMethod* local_method = klass->GetVirtualMethodDuringLinking(i, image_pointer_size_);
6652       size_t method_idx = local_method->GetMethodIndexDuringLinking();
6653       if (method_idx < super_vtable_length &&
6654           local_method == vtable->GetElementPtrSize<ArtMethod*>(method_idx, image_pointer_size_)) {
6655         continue;
6656       }
6657       vtable->SetElementPtrSize(actual_count, local_method, image_pointer_size_);
6658       local_method->SetMethodIndex(actual_count);
6659       ++actual_count;
6660     }
6661     if (!IsUint<16>(actual_count)) {
6662       ThrowClassFormatError(klass.Get(), "Too many methods defined on class: %zd", actual_count);
6663       return false;
6664     }
6665     // Shrink vtable if possible
6666     CHECK_LE(actual_count, max_count);
6667     if (actual_count < max_count) {
6668       vtable.Assign(ObjPtr<mirror::PointerArray>::DownCast(
6669           mirror::Array::CopyOf(vtable, self, actual_count)));
6670       if (UNLIKELY(vtable == nullptr)) {
6671         self->AssertPendingOOMException();
6672         return false;
6673       }
6674     }
6675     klass->SetVTable(vtable.Get());
6676   } else {
6677     CHECK_EQ(klass.Get(), GetClassRoot<mirror::Object>(this));
6678     if (!IsUint<16>(num_virtual_methods)) {
6679       ThrowClassFormatError(klass.Get(), "Too many methods: %d",
6680                             static_cast<int>(num_virtual_methods));
6681       return false;
6682     }
6683     ObjPtr<mirror::PointerArray> vtable = AllocPointerArray(self, num_virtual_methods);
6684     if (UNLIKELY(vtable == nullptr)) {
6685       self->AssertPendingOOMException();
6686       return false;
6687     }
6688     for (size_t i = 0; i < num_virtual_methods; ++i) {
6689       ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(i, image_pointer_size_);
6690       vtable->SetElementPtrSize(i, virtual_method, image_pointer_size_);
6691       virtual_method->SetMethodIndex(i & 0xFFFF);
6692     }
6693     klass->SetVTable(vtable);
6694     InitializeObjectVirtualMethodHashes(klass.Get(),
6695                                         image_pointer_size_,
6696                                         ArrayRef<uint32_t>(object_virtual_method_hashes_));
6697   }
6698   return true;
6699 }
6700 
6701 // Determine if the given iface has any subinterface in the given list that declares the method
6702 // specified by 'target'.
6703 //
6704 // Arguments
6705 // - self:    The thread we are running on
6706 // - target:  A comparator that will match any method that overrides the method we are checking for
6707 // - iftable: The iftable we are searching for an overriding method on.
6708 // - ifstart: The index of the interface we are checking to see if anything overrides
6709 // - iface:   The interface we are checking to see if anything overrides.
6710 // - image_pointer_size:
6711 //            The image pointer size.
6712 //
6713 // Returns
6714 // - True:  There is some method that matches the target comparator defined in an interface that
6715 //          is a subtype of iface.
6716 // - False: There is no method that matches the target comparator in any interface that is a subtype
6717 //          of iface.
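//
// Illustration (hypothetical interfaces, not from the sources): with an iftable ordered
// [..., I (at ifstart), J, K] where J extends I and declares a method matching 'target', this
// returns true because J is a subinterface of I that redeclares the method; if only interfaces
// unrelated to I declare such a method, it returns false.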
6718 static bool ContainsOverridingMethodOf(Thread* self,
6719                                        MethodNameAndSignatureComparator& target,
6720                                        Handle<mirror::IfTable> iftable,
6721                                        size_t ifstart,
6722                                        Handle<mirror::Class> iface,
6723                                        PointerSize image_pointer_size)
6724     REQUIRES_SHARED(Locks::mutator_lock_) {
6725   DCHECK(self != nullptr);
6726   DCHECK(iface != nullptr);
6727   DCHECK(iftable != nullptr);
6728   DCHECK_GE(ifstart, 0u);
6729   DCHECK_LT(ifstart, iftable->Count());
6730   DCHECK_EQ(iface.Get(), iftable->GetInterface(ifstart));
6731   DCHECK(iface->IsInterface());
6732 
6733   size_t iftable_count = iftable->Count();
6734   StackHandleScope<1> hs(self);
6735   MutableHandle<mirror::Class> current_iface(hs.NewHandle<mirror::Class>(nullptr));
6736   for (size_t k = ifstart + 1; k < iftable_count; k++) {
6737     // Skip ifstart since our current interface obviously cannot override itself.
6738     current_iface.Assign(iftable->GetInterface(k));
6739     // Iterate through every method on this interface. The order does not matter.
6740     for (ArtMethod& current_method : current_iface->GetDeclaredVirtualMethods(image_pointer_size)) {
6741       if (UNLIKELY(target.HasSameNameAndSignature(
6742                       current_method.GetInterfaceMethodIfProxy(image_pointer_size)))) {
6743         // Check if the k'th interface is a subinterface of iface (the one at ifstart).
6744         if (iface->IsAssignableFrom(current_iface.Get())) {
6745           return true;
6746         }
6747         break;
6748       }
6749     }
6750   }
6751   return false;
6752 }
6753 
6754 // Find the default method implementation for 'interface_method' in 'klass'. Stores it into
6755 // out_default_method and returns kDefaultFound on success. If no default method was found return
6756 // kAbstractFound and store nullptr into out_default_method. If an error occurs (such as a
6757 // default_method conflict) it will return kDefaultConflict.
6758 ClassLinker::DefaultMethodSearchResult ClassLinker::FindDefaultMethodImplementation(
6759     Thread* self,
6760     ArtMethod* target_method,
6761     Handle<mirror::Class> klass,
6762     /*out*/ArtMethod** out_default_method) const {
6763   DCHECK(self != nullptr);
6764   DCHECK(target_method != nullptr);
6765   DCHECK(out_default_method != nullptr);
6766 
6767   *out_default_method = nullptr;
6768 
6769   // We organize the interface table so that, for interface I any subinterfaces J follow it in the
6770   // table. This lets us walk the table backwards when searching for default methods.  The first one
6771   // we encounter is the best candidate since it is the most specific. Once we have found it we keep
6772   // track of it and then continue checking all other interfaces, since we need to throw an error if
6773   // we encounter conflicting default method implementations (one is not a subtype of the other).
6774   //
6775   // The order of unrelated interfaces does not matter and is not defined.
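  //
  // Hypothetical example (illustrative names only): if interface A declares a default foo() and
  // interface B extends A and also declares a default foo(), then for a class implementing both,
  // B follows A in the iftable, the backwards walk sees B's foo() first and selects it, and A's
  // foo() is later skipped because A is assignable from the chosen interface B. Two unrelated
  // interfaces both providing a default foo() would instead yield kDefaultConflict (absent a
  // further overriding subinterface).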
6776   size_t iftable_count = klass->GetIfTableCount();
6777   if (iftable_count == 0) {
6778     // No interfaces. We have already reset out to null so just return kAbstractFound.
6779     return DefaultMethodSearchResult::kAbstractFound;
6780   }
6781 
6782   StackHandleScope<3> hs(self);
6783   MutableHandle<mirror::Class> chosen_iface(hs.NewHandle<mirror::Class>(nullptr));
6784   MutableHandle<mirror::IfTable> iftable(hs.NewHandle(klass->GetIfTable()));
6785   MutableHandle<mirror::Class> iface(hs.NewHandle<mirror::Class>(nullptr));
6786   MethodNameAndSignatureComparator target_name_comparator(
6787       target_method->GetInterfaceMethodIfProxy(image_pointer_size_));
6788   // Iterate over the klass's iftable in reverse.
6789   for (size_t k = iftable_count; k != 0; ) {
6790     --k;
6791 
6792     DCHECK_LT(k, iftable->Count());
6793 
6794     iface.Assign(iftable->GetInterface(k));
6795     // Iterate through every declared method on this interface. The order does not matter.
6796     for (auto& method_iter : iface->GetDeclaredVirtualMethods(image_pointer_size_)) {
6797       ArtMethod* current_method = &method_iter;
6798       // Skip abstract methods and methods with different names.
6799       if (current_method->IsAbstract() ||
6800           !target_name_comparator.HasSameNameAndSignature(
6801               current_method->GetInterfaceMethodIfProxy(image_pointer_size_))) {
6802         continue;
6803       } else if (!current_method->IsPublic()) {
6804       // The verifier should have caught the non-public method for dex version 37. Just warn and
6805       // skip it, since this predates default methods and we don't really need to care that it
6806       // has code.
6807         LOG(WARNING) << "Interface method " << current_method->PrettyMethod()
6808                      << " is not public! "
6809                      << "This will be a fatal error in subsequent versions of android. "
6810                      << "Continuing anyway.";
6811       }
6812       if (UNLIKELY(chosen_iface != nullptr)) {
6813         // We have multiple default impls of the same method. This is a potential default conflict.
6814         // We need to check if this possibly conflicting method is either a superclass of the chosen
6815         // default implementation or is overridden by a non-default interface method. In either case
6816         // there is no conflict.
6817         if (!iface->IsAssignableFrom(chosen_iface.Get()) &&
6818             !ContainsOverridingMethodOf(self,
6819                                         target_name_comparator,
6820                                         iftable,
6821                                         k,
6822                                         iface,
6823                                         image_pointer_size_)) {
6824           VLOG(class_linker) << "Conflicting default method implementations found: "
6825                              << current_method->PrettyMethod() << " and "
6826                              << ArtMethod::PrettyMethod(*out_default_method) << " in class "
6827                              << klass->PrettyClass() << " conflict.";
6828           *out_default_method = nullptr;
6829           return DefaultMethodSearchResult::kDefaultConflict;
6830         } else {
6831           break;  // Continue checking at the next interface.
6832         }
6833       } else {
6834         // chosen_iface == null
6835         if (!ContainsOverridingMethodOf(self,
6836                                         target_name_comparator,
6837                                         iftable,
6838                                         k,
6839                                         iface,
6840                                         image_pointer_size_)) {
6841           // Don't set this as the chosen interface if something else is overriding it (because that
6842           // other interface would be potentially chosen instead if it was default). If the other
6843           // interface was abstract then we wouldn't select this interface as chosen anyway since
6844           // the abstract method masks it.
6845           *out_default_method = current_method;
6846           chosen_iface.Assign(iface.Get());
6847           // We should now finish traversing the graph to find if we have default methods that
6848           // conflict.
6849         } else {
6850           VLOG(class_linker) << "A default method '" << current_method->PrettyMethod()
6851                              << "' was "
6852                              << "skipped because it was overridden by an abstract method in a "
6853                              << "subinterface on class '" << klass->PrettyClass() << "'";
6854         }
6855       }
6856       break;
6857     }
6858   }
6859   if (*out_default_method != nullptr) {
6860     VLOG(class_linker) << "Default method '" << (*out_default_method)->PrettyMethod()
6861                        << "' selected "
6862                        << "as the implementation for '" << target_method->PrettyMethod()
6863                        << "' in '" << klass->PrettyClass() << "'";
6864     return DefaultMethodSearchResult::kDefaultFound;
6865   } else {
6866     return DefaultMethodSearchResult::kAbstractFound;
6867   }
6868 }
6869 
6870 ArtMethod* ClassLinker::AddMethodToConflictTable(ObjPtr<mirror::Class> klass,
6871                                                  ArtMethod* conflict_method,
6872                                                  ArtMethod* interface_method,
6873                                                  ArtMethod* method) {
6874   ImtConflictTable* current_table = conflict_method->GetImtConflictTable(kRuntimePointerSize);
6875   Runtime* const runtime = Runtime::Current();
6876   LinearAlloc* linear_alloc = GetAllocatorForClassLoader(klass->GetClassLoader());
6877 
6878   // Create a new entry if the existing one is the shared conflict method.
6879   ArtMethod* new_conflict_method = (conflict_method == runtime->GetImtConflictMethod())
6880       ? runtime->CreateImtConflictMethod(linear_alloc)
6881       : conflict_method;
6882 
6883   // Allocate a new table. Note that we will leak this table at the next conflict,
6884   // but that's a tradeoff compared to making the table fixed size.
6885   void* data = linear_alloc->Alloc(
6886       Thread::Current(), ImtConflictTable::ComputeSizeWithOneMoreEntry(current_table,
6887                                                                        image_pointer_size_));
6888   if (data == nullptr) {
6889     LOG(ERROR) << "Failed to allocate conflict table";
6890     return conflict_method;
6891   }
6892   ImtConflictTable* new_table = new (data) ImtConflictTable(current_table,
6893                                                             interface_method,
6894                                                             method,
6895                                                             image_pointer_size_);
6896 
6897   // Do a fence to ensure threads see the data in the table before it is assigned
6898   // to the conflict method.
6899   // Note that there is a race in the presence of multiple threads and we may leak
6900   // memory from the LinearAlloc, but that's a tradeoff compared to using
6901   // atomic operations.
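  // (Informal sketch of the intent rather than a formal memory-model argument: readers reach the
  // new entries only through the conflict method's table pointer, so publishing that pointer
  // after this release fence is what makes the fully constructed table contents visible to them.)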
6902   std::atomic_thread_fence(std::memory_order_release);
6903   new_conflict_method->SetImtConflictTable(new_table, image_pointer_size_);
6904   return new_conflict_method;
6905 }
6906 
6907 bool ClassLinker::AllocateIfTableMethodArrays(Thread* self,
6908                                               Handle<mirror::Class> klass,
6909                                               Handle<mirror::IfTable> iftable) {
6910   DCHECK(!klass->IsInterface());
6911   const bool has_superclass = klass->HasSuperClass();
6912   const bool extend_super_iftable = has_superclass;
6913   const size_t ifcount = klass->GetIfTableCount();
6914   const size_t super_ifcount = has_superclass ? klass->GetSuperClass()->GetIfTableCount() : 0U;
6915   for (size_t i = 0; i < ifcount; ++i) {
6916     size_t num_methods = iftable->GetInterface(i)->NumDeclaredVirtualMethods();
6917     if (num_methods > 0) {
6918       const bool is_super = i < super_ifcount;
6919       // This is an interface implemented by a super-class. Therefore we can just copy the method
6920       // array from the superclass.
6921       const bool super_interface = is_super && extend_super_iftable;
6922       ObjPtr<mirror::PointerArray> method_array;
6923       if (super_interface) {
6924         ObjPtr<mirror::IfTable> if_table = klass->GetSuperClass()->GetIfTable();
6925         DCHECK(if_table != nullptr);
6926         DCHECK(if_table->GetMethodArray(i) != nullptr);
6927         // If we are working on a super interface, try extending the existing method array.
6928         StackHandleScope<1u> hs(self);
6929         Handle<mirror::PointerArray> old_array = hs.NewHandle(if_table->GetMethodArray(i));
6930         method_array =
6931             ObjPtr<mirror::PointerArray>::DownCast(mirror::Object::Clone(old_array, self));
6932       } else {
6933         method_array = AllocPointerArray(self, num_methods);
6934       }
6935       if (UNLIKELY(method_array == nullptr)) {
6936         self->AssertPendingOOMException();
6937         return false;
6938       }
6939       iftable->SetMethodArray(i, method_array);
6940     }
6941   }
6942   return true;
6943 }
6944 
6945 void ClassLinker::SetIMTRef(ArtMethod* unimplemented_method,
6946                             ArtMethod* imt_conflict_method,
6947                             ArtMethod* current_method,
6948                             /*out*/bool* new_conflict,
6949                             /*out*/ArtMethod** imt_ref) {
6950   // Place method in imt if entry is empty, place conflict otherwise.
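  // Rough slot progression, for illustration: unimplemented -> concrete method -> conflict. A
  // later method with the same name and signature simply replaces the superclass vtable entry,
  // while a differently named method landing on the same slot turns the slot into a conflict.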
6951   if (*imt_ref == unimplemented_method) {
6952     *imt_ref = current_method;
6953   } else if (!(*imt_ref)->IsRuntimeMethod()) {
6954     // If we are not a conflict and we have the same signature and name as the imt
6955     // entry, it must be that we overwrote a superclass vtable entry.
6956     // Note that we have checked IsRuntimeMethod, as there may be multiple different
6957     // conflict methods.
6958     MethodNameAndSignatureComparator imt_comparator(
6959         (*imt_ref)->GetInterfaceMethodIfProxy(image_pointer_size_));
6960     if (imt_comparator.HasSameNameAndSignature(
6961           current_method->GetInterfaceMethodIfProxy(image_pointer_size_))) {
6962       *imt_ref = current_method;
6963     } else {
6964       *imt_ref = imt_conflict_method;
6965       *new_conflict = true;
6966     }
6967   } else {
6968     // Place the default conflict method. Note that there may be an existing conflict
6969     // method in the IMT, but it could be one tailored to the super class, with a
6970     // specific ImtConflictTable.
6971     *imt_ref = imt_conflict_method;
6972     *new_conflict = true;
6973   }
6974 }
6975 
6976 void ClassLinker::FillIMTAndConflictTables(ObjPtr<mirror::Class> klass) {
6977   DCHECK(klass->ShouldHaveImt()) << klass->PrettyClass();
6978   DCHECK(!klass->IsTemp()) << klass->PrettyClass();
6979   ArtMethod* imt_data[ImTable::kSize];
6980   Runtime* const runtime = Runtime::Current();
6981   ArtMethod* const unimplemented_method = runtime->GetImtUnimplementedMethod();
6982   ArtMethod* const conflict_method = runtime->GetImtConflictMethod();
6983   std::fill_n(imt_data, arraysize(imt_data), unimplemented_method);
6984   if (klass->GetIfTable() != nullptr) {
6985     bool new_conflict = false;
6986     FillIMTFromIfTable(klass->GetIfTable(),
6987                        unimplemented_method,
6988                        conflict_method,
6989                        klass,
6990                        /*create_conflict_tables=*/true,
6991                        /*ignore_copied_methods=*/false,
6992                        &new_conflict,
6993                        &imt_data[0]);
6994   }
6995   // Compare the IMT with the super class including the conflict methods. If they are equivalent,
6996   // we can just use the same pointer.
6997   ImTable* imt = nullptr;
6998   ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
6999   if (super_class != nullptr && super_class->ShouldHaveImt()) {
7000     ImTable* super_imt = super_class->GetImt(image_pointer_size_);
7001     bool same = true;
7002     for (size_t i = 0; same && i < ImTable::kSize; ++i) {
7003       ArtMethod* method = imt_data[i];
7004       ArtMethod* super_method = super_imt->Get(i, image_pointer_size_);
7005       if (method != super_method) {
7006         bool is_conflict_table = method->IsRuntimeMethod() &&
7007                                  method != unimplemented_method &&
7008                                  method != conflict_method;
7009         // Verify conflict contents.
7010         bool super_conflict_table = super_method->IsRuntimeMethod() &&
7011                                     super_method != unimplemented_method &&
7012                                     super_method != conflict_method;
7013         if (!is_conflict_table || !super_conflict_table) {
7014           same = false;
7015         } else {
7016           ImtConflictTable* table1 = method->GetImtConflictTable(image_pointer_size_);
7017           ImtConflictTable* table2 = super_method->GetImtConflictTable(image_pointer_size_);
7018           same = same && table1->Equals(table2, image_pointer_size_);
7019         }
7020       }
7021     }
7022     if (same) {
7023       imt = super_imt;
7024     }
7025   }
7026   if (imt == nullptr) {
7027     imt = klass->GetImt(image_pointer_size_);
7028     DCHECK(imt != nullptr);
7029     imt->Populate(imt_data, image_pointer_size_);
7030   } else {
7031     klass->SetImt(imt, image_pointer_size_);
7032   }
7033 }
7034 
7035 ImtConflictTable* ClassLinker::CreateImtConflictTable(size_t count,
7036                                                       LinearAlloc* linear_alloc,
7037                                                       PointerSize image_pointer_size) {
7038   void* data = linear_alloc->Alloc(Thread::Current(),
7039                                    ImtConflictTable::ComputeSize(count,
7040                                                                  image_pointer_size));
7041   return (data != nullptr) ? new (data) ImtConflictTable(count, image_pointer_size) : nullptr;
7042 }
7043 
7044 ImtConflictTable* ClassLinker::CreateImtConflictTable(size_t count, LinearAlloc* linear_alloc) {
7045   return CreateImtConflictTable(count, linear_alloc, image_pointer_size_);
7046 }
7047 
7048 void ClassLinker::FillIMTFromIfTable(ObjPtr<mirror::IfTable> if_table,
7049                                      ArtMethod* unimplemented_method,
7050                                      ArtMethod* imt_conflict_method,
7051                                      ObjPtr<mirror::Class> klass,
7052                                      bool create_conflict_tables,
7053                                      bool ignore_copied_methods,
7054                                      /*out*/bool* new_conflict,
7055                                      /*out*/ArtMethod** imt) {
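  // Overview of the flow below (summary only, no behavior change implied): the first pass walks
  // the iftable, counts how many interface methods map to each IMT slot and fills the slots via
  // SetIMTRef(); if create_conflict_tables is set, per-slot conflict tables sized from those
  // counts are allocated and a second pass appends (interface method, implementation) pairs into
  // them.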
7056   uint32_t conflict_counts[ImTable::kSize] = {};
7057   for (size_t i = 0, length = if_table->Count(); i < length; ++i) {
7058     ObjPtr<mirror::Class> interface = if_table->GetInterface(i);
7059     const size_t num_virtuals = interface->NumVirtualMethods();
7060     const size_t method_array_count = if_table->GetMethodArrayCount(i);
7061     // There can be more virtual methods than if-table methods if there are default methods.
7062     DCHECK_GE(num_virtuals, method_array_count);
7063     if (kIsDebugBuild) {
7064       if (klass->IsInterface()) {
7065         DCHECK_EQ(method_array_count, 0u);
7066       } else {
7067         DCHECK_EQ(interface->NumDeclaredVirtualMethods(), method_array_count);
7068       }
7069     }
7070     if (method_array_count == 0) {
7071       continue;
7072     }
7073     ObjPtr<mirror::PointerArray> method_array = if_table->GetMethodArray(i);
7074     for (size_t j = 0; j < method_array_count; ++j) {
7075       ArtMethod* implementation_method =
7076           method_array->GetElementPtrSize<ArtMethod*>(j, image_pointer_size_);
7077       if (ignore_copied_methods && implementation_method->IsCopied()) {
7078         continue;
7079       }
7080       DCHECK(implementation_method != nullptr);
7081       // Miranda methods cannot be used to implement an interface method, but they are safe to put
7082       // in the IMT since their entrypoint is the interface trampoline. If we put any copied methods
7083       // or interface methods in the IMT here they will not create extra conflicts since we compare
7084       // names and signatures in SetIMTRef.
7085       ArtMethod* interface_method = interface->GetVirtualMethod(j, image_pointer_size_);
7086       const uint32_t imt_index = interface_method->GetImtIndex();
7087 
7088       // There is only a conflict if the interface methods for an IMT slot don't all have the
7089       // same implementation method; keep track of this to avoid creating a conflict table in
7090       // this case.
7091 
7092       // Conflict table size for each IMT slot.
7093       ++conflict_counts[imt_index];
7094 
7095       SetIMTRef(unimplemented_method,
7096                 imt_conflict_method,
7097                 implementation_method,
7098                 /*out*/new_conflict,
7099                 /*out*/&imt[imt_index]);
7100     }
7101   }
7102 
7103   if (create_conflict_tables) {
7104     // Create the conflict tables.
7105     LinearAlloc* linear_alloc = GetAllocatorForClassLoader(klass->GetClassLoader());
7106     for (size_t i = 0; i < ImTable::kSize; ++i) {
7107       size_t conflicts = conflict_counts[i];
7108       if (imt[i] == imt_conflict_method) {
7109         ImtConflictTable* new_table = CreateImtConflictTable(conflicts, linear_alloc);
7110         if (new_table != nullptr) {
7111           ArtMethod* new_conflict_method =
7112               Runtime::Current()->CreateImtConflictMethod(linear_alloc);
7113           new_conflict_method->SetImtConflictTable(new_table, image_pointer_size_);
7114           imt[i] = new_conflict_method;
7115         } else {
7116           LOG(ERROR) << "Failed to allocate conflict table";
7117           imt[i] = imt_conflict_method;
7118         }
7119       } else {
7120         DCHECK_NE(imt[i], imt_conflict_method);
7121       }
7122     }
7123 
7124     for (size_t i = 0, length = if_table->Count(); i < length; ++i) {
7125       ObjPtr<mirror::Class> interface = if_table->GetInterface(i);
7126       const size_t method_array_count = if_table->GetMethodArrayCount(i);
7127       // There can be more virtual methods than if-table methods if there are default methods.
7128       if (method_array_count == 0) {
7129         continue;
7130       }
7131       ObjPtr<mirror::PointerArray> method_array = if_table->GetMethodArray(i);
7132       for (size_t j = 0; j < method_array_count; ++j) {
7133         ArtMethod* implementation_method =
7134             method_array->GetElementPtrSize<ArtMethod*>(j, image_pointer_size_);
7135         if (ignore_copied_methods && implementation_method->IsCopied()) {
7136           continue;
7137         }
7138         DCHECK(implementation_method != nullptr);
7139         ArtMethod* interface_method = interface->GetVirtualMethod(j, image_pointer_size_);
7140         const uint32_t imt_index = interface_method->GetImtIndex();
7141         if (!imt[imt_index]->IsRuntimeMethod() ||
7142             imt[imt_index] == unimplemented_method ||
7143             imt[imt_index] == imt_conflict_method) {
7144           continue;
7145         }
7146         ImtConflictTable* table = imt[imt_index]->GetImtConflictTable(image_pointer_size_);
7147         const size_t num_entries = table->NumEntries(image_pointer_size_);
7148         table->SetInterfaceMethod(num_entries, image_pointer_size_, interface_method);
7149         table->SetImplementationMethod(num_entries, image_pointer_size_, implementation_method);
7150       }
7151     }
7152   }
7153 }
7154 
7155 // Simple helper function that checks that no subtypes of 'val' are contained within the 'classes'
7156 // set.
7157 static bool NotSubinterfaceOfAny(
7158     const HashSet<mirror::Class*>& classes,
7159     ObjPtr<mirror::Class> val)
7160     REQUIRES(Roles::uninterruptible_)
7161     REQUIRES_SHARED(Locks::mutator_lock_) {
7162   DCHECK(val != nullptr);
7163   for (ObjPtr<mirror::Class> c : classes) {
7164     if (val->IsAssignableFrom(c)) {
7165       return false;
7166     }
7167   }
7168   return true;
7169 }
7170 
7171 // Fills in and flattens the interface inheritance hierarchy.
7172 //
7173 // By the end of this function all interfaces in the transitive closure of to_process are added to
7174 // the iftable and every interface precedes all of its sub-interfaces in this list.
7175 //
7176 // all I, J: Interface | I <: J implies J precedes I
7177 //
7178 // (note A <: B means that A is a subtype of B)
7179 //
7180 // This returns the total number of items in the iftable. The iftable might be resized down after
7181 // this call.
7182 //
7183 // We order this backwards so that we do not need to reorder superclass interfaces when new
7184 // interfaces are added in subclass's interface tables.
7185 //
7186 // Upon entry into this function iftable is a copy of the superclass's iftable with the first
7187 // super_ifcount entries filled in with the transitive closure of the interfaces of the superclass.
7188 // The other entries are uninitialized.  We will fill in the remaining entries in this function. The
7189 // iftable must be large enough to hold all interfaces without changing its size.
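//
// Small worked example (hypothetical types, for illustration only): given
//   interface A {}, interface B extends A {}, class S implements A {}, class C extends S
//   implements B {},
// the superclass S contributes [A]; processing B first appends any of its superinterfaces that
// are still missing (none here, A is already present) and then B itself, giving [A, B], so A
// precedes its subinterface B as required.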
7190 static size_t FillIfTable(Thread* self,
7191                           ObjPtr<mirror::Class> klass,
7192                           ObjPtr<mirror::ObjectArray<mirror::Class>> interfaces,
7193                           ObjPtr<mirror::IfTable> iftable,
7194                           size_t super_ifcount,
7195                           size_t num_interfaces)
7196     REQUIRES_SHARED(Locks::mutator_lock_) {
7197   ScopedAssertNoThreadSuspension nts(__FUNCTION__);
7198   // This is the set of all classes already in the iftable. Used to make checking
7199   // if a class has already been added quicker.
7200   constexpr size_t kBufferSize = 32;  // 256 bytes on 64-bit architectures.
7201   mirror::Class* buffer[kBufferSize];
7202   HashSet<mirror::Class*> classes_in_iftable(buffer, kBufferSize);
7203   // The first super_ifcount elements are from the superclass. We note that they are already added.
7204   for (size_t i = 0; i < super_ifcount; i++) {
7205     ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
7206     DCHECK(NotSubinterfaceOfAny(classes_in_iftable, iface)) << "Bad ordering.";
7207     classes_in_iftable.insert(iface.Ptr());
7208   }
7209   size_t filled_ifcount = super_ifcount;
7210   const bool have_interfaces = interfaces != nullptr;
7211   for (size_t i = 0; i != num_interfaces; ++i) {
7212     ObjPtr<mirror::Class> interface = have_interfaces
7213         ? interfaces->Get(i)
7214         : mirror::Class::GetDirectInterface(self, klass, i);
7215 
7216     // Let us call the first filled_ifcount elements of iftable the current-iface-list.
7217     // At this point in the loop current-iface-list has the invariant that:
7218     //    for every pair of interfaces I,J within it:
7219     //      if index_of(I) < index_of(J) then I is not a subtype of J
7220 
7221     // If we have already seen this element then all of its super-interfaces must already be in the
7222     // current-iface-list so we can skip adding it.
7223     if (classes_in_iftable.find(interface.Ptr()) == classes_in_iftable.end()) {
7224       // We haven't seen this interface so add all of its super-interfaces onto the
7225       // current-iface-list, skipping those already on it.
7226       int32_t ifcount = interface->GetIfTableCount();
7227       for (int32_t j = 0; j < ifcount; j++) {
7228         ObjPtr<mirror::Class> super_interface = interface->GetIfTable()->GetInterface(j);
7229         if (!ContainsElement(classes_in_iftable, super_interface)) {
7230           DCHECK(NotSubinterfaceOfAny(classes_in_iftable, super_interface)) << "Bad ordering.";
7231           classes_in_iftable.insert(super_interface.Ptr());
7232           iftable->SetInterface(filled_ifcount, super_interface);
7233           filled_ifcount++;
7234         }
7235       }
7236       DCHECK(NotSubinterfaceOfAny(classes_in_iftable, interface)) << "Bad ordering";
7237       // Place this interface onto the current-iface-list after all of its super-interfaces.
7238       classes_in_iftable.insert(interface.Ptr());
7239       iftable->SetInterface(filled_ifcount, interface);
7240       filled_ifcount++;
7241     } else if (kIsDebugBuild) {
7242       // Check all super-interfaces are already in the list.
7243       int32_t ifcount = interface->GetIfTableCount();
7244       for (int32_t j = 0; j < ifcount; j++) {
7245         ObjPtr<mirror::Class> super_interface = interface->GetIfTable()->GetInterface(j);
7246         DCHECK(ContainsElement(classes_in_iftable, super_interface))
7247             << "Iftable does not contain " << mirror::Class::PrettyClass(super_interface)
7248             << ", a superinterface of " << interface->PrettyClass();
7249       }
7250     }
7251   }
7252   if (kIsDebugBuild) {
7253     // Check that the iftable is ordered correctly.
7254     for (size_t i = 0; i < filled_ifcount; i++) {
7255       ObjPtr<mirror::Class> if_a = iftable->GetInterface(i);
7256       for (size_t j = i + 1; j < filled_ifcount; j++) {
7257         ObjPtr<mirror::Class> if_b = iftable->GetInterface(j);
7258         // !(if_a <: if_b)
7259         CHECK(!if_b->IsAssignableFrom(if_a))
7260             << "Bad interface order: " << mirror::Class::PrettyClass(if_a) << " (index " << i
7261             << ") extends "
7262             << if_b->PrettyClass() << " (index " << j << ") and so should be after it in the "
7263             << "interface list.";
7264       }
7265     }
7266   }
7267   return filled_ifcount;
7268 }
7269 
7270 bool ClassLinker::SetupInterfaceLookupTable(Thread* self,
7271                                             Handle<mirror::Class> klass,
7272                                             Handle<mirror::ObjectArray<mirror::Class>> interfaces) {
7273   StackHandleScope<1> hs(self);
7274   const bool has_superclass = klass->HasSuperClass();
7275   const size_t super_ifcount = has_superclass ? klass->GetSuperClass()->GetIfTableCount() : 0U;
7276   const bool have_interfaces = interfaces != nullptr;
7277   const size_t num_interfaces =
7278       have_interfaces ? interfaces->GetLength() : klass->NumDirectInterfaces();
7279   if (num_interfaces == 0) {
7280     if (super_ifcount == 0) {
7281       if (LIKELY(has_superclass)) {
7282         klass->SetIfTable(klass->GetSuperClass()->GetIfTable());
7283       }
7284       // Class implements no interfaces.
7285       DCHECK_EQ(klass->GetIfTableCount(), 0);
7286       return true;
7287     }
7288     // Class implements the same interfaces as its parent; are any of these not marker interfaces?
7289     bool has_non_marker_interface = false;
7290     ObjPtr<mirror::IfTable> super_iftable = klass->GetSuperClass()->GetIfTable();
7291     for (size_t i = 0; i < super_ifcount; ++i) {
7292       if (super_iftable->GetMethodArrayCount(i) > 0) {
7293         has_non_marker_interface = true;
7294         break;
7295       }
7296     }
7297     // Class just inherits marker interfaces from parent so recycle parent's iftable.
7298     if (!has_non_marker_interface) {
7299       klass->SetIfTable(super_iftable);
7300       return true;
7301     }
7302   }
7303   size_t ifcount = super_ifcount + num_interfaces;
7304   // Check that every class being implemented is an interface.
7305   for (size_t i = 0; i < num_interfaces; i++) {
7306     ObjPtr<mirror::Class> interface = have_interfaces
7307         ? interfaces->GetWithoutChecks(i)
7308         : mirror::Class::GetDirectInterface(self, klass.Get(), i);
7309     DCHECK(interface != nullptr);
7310     if (UNLIKELY(!interface->IsInterface())) {
7311       std::string temp;
7312       ThrowIncompatibleClassChangeError(klass.Get(),
7313                                         "Class %s implements non-interface class %s",
7314                                         klass->PrettyDescriptor().c_str(),
7315                                         PrettyDescriptor(interface->GetDescriptor(&temp)).c_str());
7316       return false;
7317     }
7318     ifcount += interface->GetIfTableCount();
7319   }
7320   // Create the interface function table.
7321   MutableHandle<mirror::IfTable> iftable(hs.NewHandle(AllocIfTable(self, ifcount)));
7322   if (UNLIKELY(iftable == nullptr)) {
7323     self->AssertPendingOOMException();
7324     return false;
7325   }
7326   // Fill in table with superclass's iftable.
7327   if (super_ifcount != 0) {
7328     ObjPtr<mirror::IfTable> super_iftable = klass->GetSuperClass()->GetIfTable();
7329     for (size_t i = 0; i < super_ifcount; i++) {
7330       ObjPtr<mirror::Class> super_interface = super_iftable->GetInterface(i);
7331       iftable->SetInterface(i, super_interface);
7332     }
7333   }
7334 
7335   // Note that AllowThreadSuspension is to thread suspension as pthread_testcancel is to pthread
7336   // cancellation. That is, it will suspend if there is a pending suspend request but otherwise
7337   // doesn't really do anything.
7338   self->AllowThreadSuspension();
7339 
7340   const size_t new_ifcount = FillIfTable(
7341       self, klass.Get(), interfaces.Get(), iftable.Get(), super_ifcount, num_interfaces);
7342 
7343   self->AllowThreadSuspension();
7344 
7345   // Shrink iftable in case duplicates were found
7346   if (new_ifcount < ifcount) {
7347     DCHECK_NE(num_interfaces, 0U);
7348     iftable.Assign(ObjPtr<mirror::IfTable>::DownCast(
7349         mirror::IfTable::CopyOf(iftable, self, new_ifcount * mirror::IfTable::kMax)));
7350     if (UNLIKELY(iftable == nullptr)) {
7351       self->AssertPendingOOMException();
7352       return false;
7353     }
7354     ifcount = new_ifcount;
7355   } else {
7356     DCHECK_EQ(new_ifcount, ifcount);
7357   }
7358   klass->SetIfTable(iftable.Get());
7359   return true;
7360 }
7361 
7362 // Finds the method with a name/signature that matches cmp in the given lists of methods. The list
7363 // of methods must be unique.
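// The two overloads below form a simple recursion over the variadic argument pack: each list is
// scanned in order and the single-argument overload is the base case returning nullptr. For
// example, a call like FindSameNameAndSignature(cmp, miranda_methods_, default_methods_) (vector
// names used by LinkInterfaceMethodsHelper later in this file) searches those lists in the order
// given.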
7364 static ArtMethod* FindSameNameAndSignature(MethodNameAndSignatureComparator& cmp ATTRIBUTE_UNUSED) {
7365   return nullptr;
7366 }
7367 
7368 template <typename ... Types>
7369 static ArtMethod* FindSameNameAndSignature(MethodNameAndSignatureComparator& cmp,
7370                                            const ScopedArenaVector<ArtMethod*>& list,
7371                                            const Types& ... rest)
7372     REQUIRES_SHARED(Locks::mutator_lock_) {
7373   for (ArtMethod* method : list) {
7374     if (cmp.HasSameNameAndSignature(method)) {
7375       return method;
7376     }
7377   }
7378   return FindSameNameAndSignature(cmp, rest...);
7379 }
7380 
7381 namespace {
7382 
7383 // Check that all vtable entries are present in this class's virtuals or are the same as a
7384 // superclass's vtable entry.
7385 void CheckClassOwnsVTableEntries(Thread* self,
7386                                  Handle<mirror::Class> klass,
7387                                  PointerSize pointer_size)
7388     REQUIRES_SHARED(Locks::mutator_lock_) {
7389   StackHandleScope<2> hs(self);
7390   Handle<mirror::PointerArray> check_vtable(hs.NewHandle(klass->GetVTableDuringLinking()));
7391   ObjPtr<mirror::Class> super_temp = (klass->HasSuperClass()) ? klass->GetSuperClass() : nullptr;
7392   Handle<mirror::Class> superclass(hs.NewHandle(super_temp));
7393   int32_t super_vtable_length = (superclass != nullptr) ? superclass->GetVTableLength() : 0;
7394   for (int32_t i = 0; i < check_vtable->GetLength(); ++i) {
7395     ArtMethod* m = check_vtable->GetElementPtrSize<ArtMethod*>(i, pointer_size);
7396     CHECK(m != nullptr);
7397 
7398     if (m->GetMethodIndexDuringLinking() != i) {
7399       LOG(WARNING) << m->PrettyMethod()
7400                    << " has an unexpected method index for its spot in the vtable for class "
7401                    << klass->PrettyClass();
7402     }
7403     ArraySlice<ArtMethod> virtuals = klass->GetVirtualMethodsSliceUnchecked(pointer_size);
7404     auto is_same_method = [m] (const ArtMethod& meth) {
7405       return &meth == m;
7406     };
7407     if (!((super_vtable_length > i && superclass->GetVTableEntry(i, pointer_size) == m) ||
7408           std::find_if(virtuals.begin(), virtuals.end(), is_same_method) != virtuals.end())) {
7409       LOG(WARNING) << m->PrettyMethod() << " does not seem to be owned by current class "
7410                    << klass->PrettyClass() << " or any of its superclasses!";
7411     }
7412   }
7413 }
7414 
7415 // Check to make sure the vtable does not have duplicates. Duplicates could cause problems when a
7416 // method is overridden in a subclass.
7417 template <PointerSize kPointerSize>
7418 void CheckVTableHasNoDuplicates(Thread* self, Handle<mirror::Class> klass)
7419     REQUIRES_SHARED(Locks::mutator_lock_) {
7420   StackHandleScope<1> hs(self);
7421   Handle<mirror::PointerArray> vtable(hs.NewHandle(klass->GetVTableDuringLinking()));
7422   int32_t num_entries = vtable->GetLength();
7423 
7424   // Observations:
7425   //   * The older implementation was O(n^2) and got too expensive for apps with larger classes.
7426   //   * Many classes do not override Object functions (e.g., equals/hashCode/toString). Thus,
7427   //     for many classes outside of libcore a cross-dexfile check has to be run anyways.
7428   //   * In the cross-dexfile case, with the O(n^2), in the best case O(n) cross checks would have
7429   //     to be done. It is thus OK in a single-pass algorithm to read all data, anyways.
7430   //   * The single-pass algorithm will trade memory for speed, but that is OK.
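  // Rough sketch of the approach that follows (summary only): a first pass optimistically assumes
  // every visible entry comes from a single dex file and dedups on (name_idx, proto_idx) pairs;
  // only if an entry from a different dex file shows up does it fall back to the slower
  // cross-dexfile pass that hashes the actual name and signature strings.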
7431 
7432   CHECK_GT(num_entries, 0);
7433 
7434   auto log_fn = [&vtable, &klass](int32_t i, int32_t j) REQUIRES_SHARED(Locks::mutator_lock_) {
7435     ArtMethod* m1 = vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(i);
7436     ArtMethod* m2 = vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(j);
7437     LOG(WARNING) << "vtable entries " << i << " and " << j << " are identical for "
7438                  << klass->PrettyClass() << " in method " << m1->PrettyMethod()
7439                  << " (0x" << std::hex << reinterpret_cast<uintptr_t>(m1) << ") and "
7440                  << m2->PrettyMethod() << " (0x" << std::hex
7441                  << reinterpret_cast<uintptr_t>(m2) << ")";
7442   };
7443   struct BaseHashType {
7444     static size_t HashCombine(size_t seed, size_t val) {
7445       return seed ^ (val + 0x9e3779b9 + (seed << 6) + (seed >> 2));
7446     }
7447   };
7448 
7449   // Check assuming all entries come from the same dex file.
7450   {
7451     // Find the first interesting method and its dex file.
7452     int32_t start = 0;
7453     for (; start < num_entries; ++start) {
7454       ArtMethod* vtable_entry = vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(start);
7455       // Don't bother if we cannot 'see' the vtable entry (i.e. it is a package-private member
7456       // maybe).
7457       if (!klass->CanAccessMember(vtable_entry->GetDeclaringClass(),
7458                                   vtable_entry->GetAccessFlags())) {
7459         continue;
7460       }
7461       break;
7462     }
7463     if (start == num_entries) {
7464       return;
7465     }
7466     const DexFile* dex_file =
7467         vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(start)->
7468             GetInterfaceMethodIfProxy(kPointerSize)->GetDexFile();
7469 
7470     // Helper function to avoid logging if we have to run the cross-file checks.
7471     auto check_fn = [&](bool log_warn) REQUIRES_SHARED(Locks::mutator_lock_) {
7472       // Use a map to store seen entries, as the storage space is too large for a bitvector.
7473       using PairType = std::pair<uint32_t, uint16_t>;
7474       struct PairHash : BaseHashType {
7475         size_t operator()(const PairType& key) const {
7476           return BaseHashType::HashCombine(BaseHashType::HashCombine(0, key.first), key.second);
7477         }
7478       };
7479       HashMap<PairType, int32_t, DefaultMapEmptyFn<PairType, int32_t>, PairHash> seen;
7480       seen.reserve(2 * num_entries);
7481       bool need_slow_path = false;
7482       bool found_dup = false;
7483       for (int i = start; i < num_entries; ++i) {
7484         // Can use Unchecked here as the start loop already ensured that the arrays are correct
7485         // wrt/ kPointerSize.
7486         ArtMethod* vtable_entry = vtable->GetElementPtrSizeUnchecked<ArtMethod*, kPointerSize>(i);
7487         if (!klass->CanAccessMember(vtable_entry->GetDeclaringClass(),
7488                                     vtable_entry->GetAccessFlags())) {
7489           continue;
7490         }
7491         ArtMethod* m = vtable_entry->GetInterfaceMethodIfProxy(kPointerSize);
7492         if (dex_file != m->GetDexFile()) {
7493           need_slow_path = true;
7494           break;
7495         }
7496         const dex::MethodId* m_mid = &dex_file->GetMethodId(m->GetDexMethodIndex());
7497         PairType pair = std::make_pair(m_mid->name_idx_.index_, m_mid->proto_idx_.index_);
7498         auto it = seen.find(pair);
7499         if (it != seen.end()) {
7500           found_dup = true;
7501           if (log_warn) {
7502             log_fn(it->second, i);
7503           }
7504         } else {
7505           seen.insert(std::make_pair(pair, i));
7506         }
7507       }
7508       return std::make_pair(need_slow_path, found_dup);
7509     };
7510     std::pair<bool, bool> result = check_fn(/* log_warn= */ false);
7511     if (!result.first) {
7512       if (result.second) {
7513         check_fn(/* log_warn= */ true);
7514       }
7515       return;
7516     }
7517   }
7518 
7519   // Need to check across dex files.
7520   struct Entry {
7521     size_t cached_hash = 0;
7522     uint32_t name_len = 0;
7523     const char* name = nullptr;
7524     Signature signature = Signature::NoSignature();
7525 
7526     Entry() = default;
7527     Entry(const Entry& other) = default;
7528     Entry& operator=(const Entry& other) = default;
7529 
7530     Entry(const DexFile* dex_file, const dex::MethodId& mid)
7531         : name_len(0),  // Explicit to enforce ordering with -Werror,-Wreorder-ctor.
7532           // This call writes `name_len` and it is therefore necessary that the
7533           // initializer for `name_len` comes before it, otherwise the value
7534           // from the call would be overwritten by that initializer.
7535           name(dex_file->StringDataAndUtf16LengthByIdx(mid.name_idx_, &name_len)),
7536           signature(dex_file->GetMethodSignature(mid)) {
7537       // The `name_len` has been initialized to the UTF16 length. Calculate length in bytes.
7538       if (name[name_len] != 0) {
7539         name_len += strlen(name + name_len);
7540       }
7541     }
7542 
7543     bool operator==(const Entry& other) const {
7544       return name_len == other.name_len &&
7545              memcmp(name, other.name, name_len) == 0 &&
7546              signature == other.signature;
7547     }
7548   };
7549   struct EntryHash {
7550     size_t operator()(const Entry& key) const {
7551       return key.cached_hash;
7552     }
7553   };
7554   HashMap<Entry, int32_t, DefaultMapEmptyFn<Entry, int32_t>, EntryHash> map;
7555   for (int32_t i = 0; i < num_entries; ++i) {
7556     // Can use Unchecked here as the first loop already ensured that the arrays are correct
7557     // wrt/ kPointerSize.
7558     ArtMethod* vtable_entry = vtable->GetElementPtrSizeUnchecked<ArtMethod*, kPointerSize>(i);
7559     // Don't bother if we cannot 'see' the vtable entry (i.e. it is a package-private member
7560     // maybe).
7561     if (!klass->CanAccessMember(vtable_entry->GetDeclaringClass(),
7562                                 vtable_entry->GetAccessFlags())) {
7563       continue;
7564     }
7565     ArtMethod* m = vtable_entry->GetInterfaceMethodIfProxy(kPointerSize);
7566     const DexFile* dex_file = m->GetDexFile();
7567     const dex::MethodId& mid = dex_file->GetMethodId(m->GetDexMethodIndex());
7568 
7569     Entry e(dex_file, mid);
7570 
7571     size_t string_hash = std::hash<std::string_view>()(std::string_view(e.name, e.name_len));
7572     size_t sig_hash = std::hash<std::string>()(e.signature.ToString());
7573     e.cached_hash = BaseHashType::HashCombine(BaseHashType::HashCombine(0u, string_hash),
7574                                               sig_hash);
7575 
7576     auto it = map.find(e);
7577     if (it != map.end()) {
7578       log_fn(it->second, i);
7579     } else {
7580       map.insert(std::make_pair(e, i));
7581     }
7582   }
7583 }
7584 
7585 void CheckVTableHasNoDuplicates(Thread* self,
7586                                 Handle<mirror::Class> klass,
7587                                 PointerSize pointer_size)
7588     REQUIRES_SHARED(Locks::mutator_lock_) {
7589   switch (pointer_size) {
7590     case PointerSize::k64:
7591       CheckVTableHasNoDuplicates<PointerSize::k64>(self, klass);
7592       break;
7593     case PointerSize::k32:
7594       CheckVTableHasNoDuplicates<PointerSize::k32>(self, klass);
7595       break;
7596   }
7597 }
7598 
7599 static void CheckVTable(Thread* self, Handle<mirror::Class> klass, PointerSize pointer_size)
7600     REQUIRES_SHARED(Locks::mutator_lock_) {
7601   CheckClassOwnsVTableEntries(self, klass, pointer_size);
7602   CheckVTableHasNoDuplicates(self, klass, pointer_size);
7603 }
7604 
7605 }  // namespace
7606 
7607 void ClassLinker::FillImtFromSuperClass(Handle<mirror::Class> klass,
7608                                         ArtMethod* unimplemented_method,
7609                                         ArtMethod* imt_conflict_method,
7610                                         bool* new_conflict,
7611                                         ArtMethod** imt) {
7612   DCHECK(klass->HasSuperClass());
7613   ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
7614   if (super_class->ShouldHaveImt()) {
7615     ImTable* super_imt = super_class->GetImt(image_pointer_size_);
7616     for (size_t i = 0; i < ImTable::kSize; ++i) {
7617       imt[i] = super_imt->Get(i, image_pointer_size_);
7618     }
7619   } else {
7620     // No imt in the super class, need to reconstruct from the iftable.
7621     ObjPtr<mirror::IfTable> if_table = super_class->GetIfTable();
7622     if (if_table->Count() != 0) {
7623       // Ignore copied methods since we will handle these in LinkInterfaceMethods.
7624       FillIMTFromIfTable(if_table,
7625                          unimplemented_method,
7626                          imt_conflict_method,
7627                          klass.Get(),
7628                          /*create_conflict_tables=*/false,
7629                          /*ignore_copied_methods=*/true,
7630                          /*out*/new_conflict,
7631                          /*out*/imt);
7632     }
7633   }
7634 }
7635 
7636 class ClassLinker::LinkInterfaceMethodsHelper {
7637  public:
7638   LinkInterfaceMethodsHelper(ClassLinker* class_linker,
7639                              Handle<mirror::Class> klass,
7640                              Thread* self,
7641                              Runtime* runtime)
7642       : class_linker_(class_linker),
7643         klass_(klass),
7644         method_alignment_(ArtMethod::Alignment(class_linker->GetImagePointerSize())),
7645         method_size_(ArtMethod::Size(class_linker->GetImagePointerSize())),
7646         self_(self),
7647         stack_(runtime->GetLinearAlloc()->GetArenaPool()),
7648         allocator_(&stack_),
7649         default_conflict_methods_(allocator_.Adapter()),
7650         overriding_default_conflict_methods_(allocator_.Adapter()),
7651         miranda_methods_(allocator_.Adapter()),
7652         default_methods_(allocator_.Adapter()),
7653         overriding_default_methods_(allocator_.Adapter()),
7654         move_table_(allocator_.Adapter()) {
7655   }
7656 
7657   ArtMethod* FindMethod(ArtMethod* interface_method,
7658                         MethodNameAndSignatureComparator& interface_name_comparator,
7659                         ArtMethod* vtable_impl)
7660       REQUIRES_SHARED(Locks::mutator_lock_);
7661 
7662   ArtMethod* GetOrCreateMirandaMethod(ArtMethod* interface_method,
7663                                       MethodNameAndSignatureComparator& interface_name_comparator)
7664       REQUIRES_SHARED(Locks::mutator_lock_);
7665 
7666   bool HasNewVirtuals() const {
7667     return !(miranda_methods_.empty() &&
7668              default_methods_.empty() &&
7669              overriding_default_methods_.empty() &&
7670              overriding_default_conflict_methods_.empty() &&
7671              default_conflict_methods_.empty());
7672   }
7673 
7674   void ReallocMethods() REQUIRES_SHARED(Locks::mutator_lock_);
7675 
7676   ObjPtr<mirror::PointerArray> UpdateVtable(
7677       const HashMap<size_t, ClassLinker::MethodTranslation>& default_translations,
7678       Handle<mirror::PointerArray> old_vtable) REQUIRES_SHARED(Locks::mutator_lock_);
7679 
7680   void UpdateIfTable(Handle<mirror::IfTable> iftable) REQUIRES_SHARED(Locks::mutator_lock_);
7681 
7682   void UpdateIMT(ArtMethod** out_imt);
7683 
CheckNoStaleMethodsInDexCache()7684   void CheckNoStaleMethodsInDexCache() REQUIRES_SHARED(Locks::mutator_lock_) {
7685     if (kIsDebugBuild) {
7686       PointerSize pointer_size = class_linker_->GetImagePointerSize();
      // Check that there are no stale methods in the dex cache array.
      auto* resolved_methods = klass_->GetDexCache()->GetResolvedMethods();
      for (size_t i = 0, count = klass_->GetDexCache()->NumResolvedMethods(); i < count; ++i) {
        auto pair = mirror::DexCache::GetNativePair(resolved_methods, i);
        ArtMethod* m = pair.object;
        CHECK(move_table_.find(m) == move_table_.end() ||
              // The original versions of copied methods will still be present so allow those too.
              // Note that if the first check passes this might fail to GetDeclaringClass().
              std::find_if(m->GetDeclaringClass()->GetMethods(pointer_size).begin(),
                           m->GetDeclaringClass()->GetMethods(pointer_size).end(),
                           [m] (ArtMethod& meth) {
                             return &meth == m;
                           }) != m->GetDeclaringClass()->GetMethods(pointer_size).end())
            << "Obsolete method " << m->PrettyMethod() << " is in dex cache!";
      }
    }
  }

  void ClobberOldMethods(LengthPrefixedArray<ArtMethod>* old_methods,
                         LengthPrefixedArray<ArtMethod>* methods) {
    if (kIsDebugBuild) {
      CHECK(methods != nullptr);
      // Put some random garbage in old methods to help find stale pointers.
      if (methods != old_methods && old_methods != nullptr) {
        // Need to make sure the GC is not running since it could be scanning the methods we are
        // about to overwrite.
        ScopedThreadStateChange tsc(self_, kSuspended);
        gc::ScopedGCCriticalSection gcs(self_,
                                        gc::kGcCauseClassLinker,
                                        gc::kCollectorTypeClassLinker);
        const size_t old_size = LengthPrefixedArray<ArtMethod>::ComputeSize(old_methods->size(),
                                                                            method_size_,
                                                                            method_alignment_);
        memset(old_methods, 0xFEu, old_size);
      }
    }
  }

 private:
  size_t NumberOfNewVirtuals() const {
    return miranda_methods_.size() +
           default_methods_.size() +
           overriding_default_conflict_methods_.size() +
           overriding_default_methods_.size() +
           default_conflict_methods_.size();
  }

  bool FillTables() REQUIRES_SHARED(Locks::mutator_lock_) {
    return !klass_->IsInterface();
  }

  void LogNewVirtuals() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!klass_->IsInterface() || (default_methods_.empty() && miranda_methods_.empty()))
        << "Interfaces should only have default-conflict methods appended to them.";
    VLOG(class_linker) << mirror::Class::PrettyClass(klass_.Get()) << ": miranda_methods="
                       << miranda_methods_.size()
                       << " default_methods=" << default_methods_.size()
                       << " overriding_default_methods=" << overriding_default_methods_.size()
                       << " default_conflict_methods=" << default_conflict_methods_.size()
                       << " overriding_default_conflict_methods="
                       << overriding_default_conflict_methods_.size();
  }

  ClassLinker* class_linker_;
  Handle<mirror::Class> klass_;
  size_t method_alignment_;
  size_t method_size_;
  Thread* const self_;

  // These are allocated on the heap to begin with; we then transfer them to the linear alloc when
  // we re-create the virtual methods array.
  // Need to use low 4GB arenas for the compiler or else the pointers won't fit in the 32-bit
  // method array during cross compilation.
  // Use the linear alloc pool since this one is in the low 4GB for the compiler.
  ArenaStack stack_;
  ScopedArenaAllocator allocator_;

  ScopedArenaVector<ArtMethod*> default_conflict_methods_;
  ScopedArenaVector<ArtMethod*> overriding_default_conflict_methods_;
  ScopedArenaVector<ArtMethod*> miranda_methods_;
  ScopedArenaVector<ArtMethod*> default_methods_;
  ScopedArenaVector<ArtMethod*> overriding_default_methods_;

  ScopedArenaUnorderedMap<ArtMethod*, ArtMethod*> move_table_;
};

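// Searches the default method implementations for `interface_method`. Depending on whether the
// search finds a conflict, a usable default implementation or only abstract methods, this
// records the method for later copying (unless an equivalent entry from the superclass vtable,
// `vtable_impl`, can be reused) and returns it; may return nullptr in the abstract case.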
ArtMethod* ClassLinker::LinkInterfaceMethodsHelper::FindMethod(
    ArtMethod* interface_method,
    MethodNameAndSignatureComparator& interface_name_comparator,
    ArtMethod* vtable_impl) {
  ArtMethod* current_method = nullptr;
  switch (class_linker_->FindDefaultMethodImplementation(self_,
                                                         interface_method,
                                                         klass_,
                                                         /*out*/&current_method)) {
    case DefaultMethodSearchResult::kDefaultConflict: {
      // Default method conflict.
      DCHECK(current_method == nullptr);
      ArtMethod* default_conflict_method = nullptr;
      if (vtable_impl != nullptr && vtable_impl->IsDefaultConflicting()) {
        // We can reuse the method from the superclass, don't bother adding it to virtuals.
        default_conflict_method = vtable_impl;
      } else {
        // See if we already have a conflict method for this method.
        ArtMethod* preexisting_conflict = FindSameNameAndSignature(
            interface_name_comparator,
            default_conflict_methods_,
            overriding_default_conflict_methods_);
        if (LIKELY(preexisting_conflict != nullptr)) {
          // We already have another conflict we can reuse.
          default_conflict_method = preexisting_conflict;
        } else {
          // Note that we do this even if we are an interface since we need to create this and
          // cannot reuse another class's.
          // Create a new conflict method for this to use.
          default_conflict_method = reinterpret_cast<ArtMethod*>(allocator_.Alloc(method_size_));
          new(default_conflict_method) ArtMethod(interface_method,
                                                 class_linker_->GetImagePointerSize());
          if (vtable_impl == nullptr) {
            // Save the conflict method. We need to add it to the vtable.
            default_conflict_methods_.push_back(default_conflict_method);
          } else {
            // Save the conflict method but it is already in the vtable.
            overriding_default_conflict_methods_.push_back(default_conflict_method);
          }
        }
      }
      current_method = default_conflict_method;
      break;
    }  // case kDefaultConflict
    case DefaultMethodSearchResult::kDefaultFound: {
      DCHECK(current_method != nullptr);
      // Found a default method.
      if (vtable_impl != nullptr &&
          current_method->GetDeclaringClass() == vtable_impl->GetDeclaringClass()) {
        // We found a default method but it was the same one we already have from our
        // superclass. Don't bother adding it to our vtable again.
        current_method = vtable_impl;
      } else if (LIKELY(FillTables())) {
        // Interfaces don't need to copy default methods since they don't have vtables.
        // Only record this default method if it is new to save space.
        // TODO It might be worthwhile to copy default methods on interfaces anyway since it
        //      would make lookup for interface super much faster. (We would only need to scan
        //      the iftable to find if there is a NSME or AME.)
        ArtMethod* old = FindSameNameAndSignature(interface_name_comparator,
                                                  default_methods_,
                                                  overriding_default_methods_);
        if (old == nullptr) {
          // We found a default method implementation and there were no conflicts.
          if (vtable_impl == nullptr) {
            // Save the default method. We need to add it to the vtable.
            default_methods_.push_back(current_method);
          } else {
            // Save the default method but it is already in the vtable.
            overriding_default_methods_.push_back(current_method);
          }
        } else {
          CHECK(old == current_method) << "Multiple default implementations selected!";
        }
      }
      break;
    }  // case kDefaultFound
    case DefaultMethodSearchResult::kAbstractFound: {
      DCHECK(current_method == nullptr);
      // Abstract method masks all defaults.
      if (vtable_impl != nullptr &&
          vtable_impl->IsAbstract() &&
          !vtable_impl->IsDefaultConflicting()) {
        // We need to make this an abstract method but the version in the vtable already is so
        // don't do anything.
        current_method = vtable_impl;
      }
      break;
    }  // case kAbstractFound
  }
  return current_method;
}

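// A "miranda method" is an abstract placeholder copied into the class for an interface method
// that has neither an implementation nor a default; it gives the vtable and iftable a valid slot
// to point at. Reuse an existing miranda method if one with the same name and signature exists.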
ArtMethod* ClassLinker::LinkInterfaceMethodsHelper::GetOrCreateMirandaMethod(
    ArtMethod* interface_method,
    MethodNameAndSignatureComparator& interface_name_comparator) {
  // Find out if there is already a miranda method we can use.
  ArtMethod* miranda_method = FindSameNameAndSignature(interface_name_comparator,
                                                       miranda_methods_);
  if (miranda_method == nullptr) {
    DCHECK(interface_method->IsAbstract()) << interface_method->PrettyMethod();
    miranda_method = reinterpret_cast<ArtMethod*>(allocator_.Alloc(method_size_));
    CHECK(miranda_method != nullptr);
    // Point the interface table at a phantom slot.
    new(miranda_method) ArtMethod(interface_method, class_linker_->GetImagePointerSize());
    miranda_methods_.push_back(miranda_method);
  }
  return miranda_method;
}

void ClassLinker::LinkInterfaceMethodsHelper::ReallocMethods() {
  LogNewVirtuals();

  const size_t old_method_count = klass_->NumMethods();
  const size_t new_method_count = old_method_count + NumberOfNewVirtuals();
  DCHECK_NE(old_method_count, new_method_count);

  // Attempt to realloc to save RAM if possible.
  LengthPrefixedArray<ArtMethod>* old_methods = klass_->GetMethodsPtr();
  // The Realloced virtual methods aren't visible from the class roots, so there is no issue
  // where GCs could attempt to mark stale pointers due to memcpy. And since we overwrite the
  // realloced memory with out->CopyFrom, we are guaranteed to have objects in the to space since
  // CopyFrom has internal read barriers.
  //
  // TODO We should maybe move some of this into mirror::Class or at least into another method.
  const size_t old_size = LengthPrefixedArray<ArtMethod>::ComputeSize(old_method_count,
                                                                      method_size_,
                                                                      method_alignment_);
  const size_t new_size = LengthPrefixedArray<ArtMethod>::ComputeSize(new_method_count,
                                                                      method_size_,
                                                                      method_alignment_);
  const size_t old_methods_ptr_size = (old_methods != nullptr) ? old_size : 0;
  auto* methods = reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(
      class_linker_->GetAllocatorForClassLoader(klass_->GetClassLoader())->Realloc(
          self_, old_methods, old_methods_ptr_size, new_size));
  CHECK(methods != nullptr);  // Native allocation failure aborts.

  PointerSize pointer_size = class_linker_->GetImagePointerSize();
  if (methods != old_methods) {
    // Maps from heap allocated miranda method to linear alloc miranda method.
    StrideIterator<ArtMethod> out = methods->begin(method_size_, method_alignment_);
    // Copy over the old methods.
    for (auto& m : klass_->GetMethods(pointer_size)) {
      move_table_.emplace(&m, &*out);
      // The CopyFrom is only necessary to not miss read barriers since Realloc won't do read
      // barriers when it copies.
      out->CopyFrom(&m, pointer_size);
      ++out;
    }
  }
  StrideIterator<ArtMethod> out(methods->begin(method_size_, method_alignment_) + old_method_count);
  // Copy over miranda methods before copying vtable since CopyOf may cause thread suspension and
  // we want the roots of the miranda methods to get visited.
  for (size_t i = 0; i < miranda_methods_.size(); ++i) {
    ArtMethod* mir_method = miranda_methods_[i];
    ArtMethod& new_method = *out;
    new_method.CopyFrom(mir_method, pointer_size);
    uint32_t access_flags = new_method.GetAccessFlags();
    DCHECK_EQ(access_flags & kAccIntrinsic, 0u) << "Miranda method should not be an intrinsic!";
    DCHECK_EQ(access_flags & kAccDefault, 0u) << "Miranda method should not be a default method!";
    DCHECK_NE(access_flags & kAccAbstract, 0u) << "Miranda method should be abstract!";
    new_method.SetAccessFlags(access_flags | kAccCopied);
    move_table_.emplace(mir_method, &new_method);
    // Update the entry in the method array, as the array will be used for future lookups,
    // where thread suspension is allowed.
    // As such, the array should not contain locally allocated ArtMethod, otherwise the GC
    // would not see them.
    miranda_methods_[i] = &new_method;
    ++out;
  }
  // We need to copy the default methods into our own method table since the runtime requires that
  // every method on a class's vtable be in that respective class's virtual method table.
  // NOTE This means that two classes might have the same implementation of a method from the same
  // interface but will have different ArtMethod*s for them. This also means we cannot compare a
  // default method found on a class with one found on the declaring interface directly and must
  // look at the declaring class to determine if they are the same.
  for (ScopedArenaVector<ArtMethod*>* methods_vec : {&default_methods_,
                                                     &overriding_default_methods_}) {
    for (size_t i = 0; i < methods_vec->size(); ++i) {
      ArtMethod* def_method = (*methods_vec)[i];
      ArtMethod& new_method = *out;
      new_method.CopyFrom(def_method, pointer_size);
      // Clear the kAccSkipAccessChecks flag if it is present. Since this class hasn't been
      // verified yet it shouldn't have methods that are skipping access checks.
      // TODO This is rather arbitrary. We should maybe support classes where only some of its
      // methods are skip_access_checks.
      DCHECK_EQ(new_method.GetAccessFlags() & kAccNative, 0u);
      constexpr uint32_t kSetFlags = kAccDefault | kAccCopied;
      constexpr uint32_t kMaskFlags = ~kAccSkipAccessChecks;
      new_method.SetAccessFlags((new_method.GetAccessFlags() | kSetFlags) & kMaskFlags);
      move_table_.emplace(def_method, &new_method);
      // Update the entry in the method array, as the array will be used for future lookups,
      // where thread suspension is allowed.
      // As such, the array should not contain locally allocated ArtMethod, otherwise the GC
      // would not see them.
      (*methods_vec)[i] = &new_method;
      ++out;
    }
  }
  for (ScopedArenaVector<ArtMethod*>* methods_vec : {&default_conflict_methods_,
                                                     &overriding_default_conflict_methods_}) {
    for (size_t i = 0; i < methods_vec->size(); ++i) {
      ArtMethod* conf_method = (*methods_vec)[i];
      ArtMethod& new_method = *out;
      new_method.CopyFrom(conf_method, pointer_size);
      // This is a type of default method (there are default method impls, just a conflict), so
      // mark this as a default. We use the `kAccAbstract` flag to distinguish it from an
      // invokable copied default method without using a separate access flag, but the default
      // conflicting method is technically not abstract and ArtMethod::IsAbstract() shall return
      // false. Also clear the kAccSkipAccessChecks bit since this class hasn't been verified yet,
      // so it shouldn't have methods that are skipping access checks. Also clear potential
      // kAccSingleImplementation to avoid CHA trying to inline the default method.
      uint32_t access_flags = new_method.GetAccessFlags();
      DCHECK_EQ(access_flags & kAccNative, 0u);
      DCHECK_EQ(access_flags & kAccIntrinsic, 0u);
      constexpr uint32_t kSetFlags = kAccDefault | kAccAbstract | kAccCopied;
      constexpr uint32_t kMaskFlags = ~(kAccSkipAccessChecks | kAccSingleImplementation);
      new_method.SetAccessFlags((access_flags | kSetFlags) & kMaskFlags);
      DCHECK(new_method.IsDefaultConflicting());
      DCHECK(!new_method.IsAbstract());
      // The actual method might or might not be marked abstract since we just copied it from a
      // (possibly default) interface method. We need to set its entry point to be the bridge so
      // that the compiler will not invoke the implementation of whatever method we copied from.
      EnsureThrowsInvocationError(class_linker_, &new_method);
      move_table_.emplace(conf_method, &new_method);
      // Update the entry in the method array, as the array will be used for future lookups,
      // where thread suspension is allowed.
      // As such, the array should not contain locally allocated ArtMethod, otherwise the GC
      // would not see them.
      (*methods_vec)[i] = &new_method;
      ++out;
    }
  }
  methods->SetSize(new_method_count);
  class_linker_->UpdateClassMethods(klass_.Get(), methods);
}

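// Copies the old vtable into a larger array, appends the newly created default, conflict and
// miranda methods, and remaps the old entries through `default_translations` and `move_table_`.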
ObjPtr<mirror::PointerArray> ClassLinker::LinkInterfaceMethodsHelper::UpdateVtable(
    const HashMap<size_t, ClassLinker::MethodTranslation>& default_translations,
    Handle<mirror::PointerArray> old_vtable) {
  // Update the vtable to the new method structures. We can skip this for interfaces since they
  // do not have vtables.
  const size_t old_vtable_count = old_vtable->GetLength();
  const size_t new_vtable_count = old_vtable_count +
                                  miranda_methods_.size() +
                                  default_methods_.size() +
                                  default_conflict_methods_.size();

  ObjPtr<mirror::PointerArray> vtable = ObjPtr<mirror::PointerArray>::DownCast(
      mirror::Array::CopyOf(old_vtable, self_, new_vtable_count));
  if (UNLIKELY(vtable == nullptr)) {
    self_->AssertPendingOOMException();
    return nullptr;
  }

  size_t vtable_pos = old_vtable_count;
  PointerSize pointer_size = class_linker_->GetImagePointerSize();
  // Update all the newly copied method's indexes so they denote their placement in the vtable.
  for (const ScopedArenaVector<ArtMethod*>& methods_vec : {default_methods_,
                                                           default_conflict_methods_,
                                                           miranda_methods_}) {
    // These are the functions that are not already in the vtable!
    for (ArtMethod* new_vtable_method : methods_vec) {
      // Leave the declaring class alone; the method's dex_code_item_offset_ and dex_method_index_
      // fields are references into the dex file the method was defined in. Since the ArtMethod
      // does not store that information, it uses declaring_class_->dex_cache_.
      new_vtable_method->SetMethodIndex(0xFFFF & vtable_pos);
      vtable->SetElementPtrSize(vtable_pos, new_vtable_method, pointer_size);
      ++vtable_pos;
    }
  }
  DCHECK_EQ(vtable_pos, new_vtable_count);

  // Update old vtable methods. We use the default_translations map to figure out what each
  // vtable entry should be updated to, if they need to be at all.
  for (size_t i = 0; i < old_vtable_count; ++i) {
    ArtMethod* translated_method = vtable->GetElementPtrSize<ArtMethod*>(i, pointer_size);
    // Try and find what we need to change this method to.
    auto translation_it = default_translations.find(i);
    if (translation_it != default_translations.end()) {
      if (translation_it->second.IsInConflict()) {
        // Find which conflict method we are to use for this method.
        MethodNameAndSignatureComparator old_method_comparator(
            translated_method->GetInterfaceMethodIfProxy(pointer_size));
        // We only need to look through overriding_default_conflict_methods since this is an
        // overridden method we are fixing up here.
        ArtMethod* new_conflict_method = FindSameNameAndSignature(
            old_method_comparator, overriding_default_conflict_methods_);
        CHECK(new_conflict_method != nullptr) << "Expected a conflict method!";
        translated_method = new_conflict_method;
      } else if (translation_it->second.IsAbstract()) {
        // Find which miranda method we are to use for this method.
        MethodNameAndSignatureComparator old_method_comparator(
            translated_method->GetInterfaceMethodIfProxy(pointer_size));
        ArtMethod* miranda_method = FindSameNameAndSignature(old_method_comparator,
                                                             miranda_methods_);
        DCHECK(miranda_method != nullptr);
        translated_method = miranda_method;
      } else {
        // Normal default method (changed from an older default or abstract interface method).
        DCHECK(translation_it->second.IsTranslation());
        translated_method = translation_it->second.GetTranslation();
        auto it = move_table_.find(translated_method);
        DCHECK(it != move_table_.end());
        translated_method = it->second;
      }
    } else {
      auto it = move_table_.find(translated_method);
      translated_method = (it != move_table_.end()) ? it->second : nullptr;
    }

    if (translated_method != nullptr) {
      // Make sure the new method's index is set.
      if (translated_method->GetMethodIndexDuringLinking() != i) {
        if (kIsDebugBuild) {
          auto* methods = klass_->GetMethodsPtr();
          CHECK_LE(reinterpret_cast<uintptr_t>(&*methods->begin(method_size_, method_alignment_)),
                   reinterpret_cast<uintptr_t>(translated_method));
          CHECK_LT(reinterpret_cast<uintptr_t>(translated_method),
                   reinterpret_cast<uintptr_t>(&*methods->end(method_size_, method_alignment_)));
        }
        translated_method->SetMethodIndex(0xFFFF & i);
      }
      vtable->SetElementPtrSize(i, translated_method, pointer_size);
    }
  }
  klass_->SetVTable(vtable);
  return vtable;
}

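// Replaces stale ArtMethod pointers in every iftable method array with their moved copies.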
void ClassLinker::LinkInterfaceMethodsHelper::UpdateIfTable(Handle<mirror::IfTable> iftable) {
  PointerSize pointer_size = class_linker_->GetImagePointerSize();
  const size_t ifcount = klass_->GetIfTableCount();
  // Go fix up all the stale iftable pointers.
  for (size_t i = 0; i < ifcount; ++i) {
    for (size_t j = 0, count = iftable->GetMethodArrayCount(i); j < count; ++j) {
      ObjPtr<mirror::PointerArray> method_array = iftable->GetMethodArray(i);
      ArtMethod* m = method_array->GetElementPtrSize<ArtMethod*>(j, pointer_size);
      DCHECK(m != nullptr) << klass_->PrettyClass();
      auto it = move_table_.find(m);
      if (it != move_table_.end()) {
        auto* new_m = it->second;
        DCHECK(new_m != nullptr) << klass_->PrettyClass();
        method_array->SetElementPtrSize(j, new_m, pointer_size);
      }
    }
  }
}

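// Remaps IMT entries that point at methods which were moved by ReallocMethods().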
void ClassLinker::LinkInterfaceMethodsHelper::UpdateIMT(ArtMethod** out_imt) {
  // Fix up IMT next.
  for (size_t i = 0; i < ImTable::kSize; ++i) {
    auto it = move_table_.find(out_imt[i]);
    if (it != move_table_.end()) {
      out_imt[i] = it->second;
    }
  }
}

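// Links the interface methods of `klass`: fills the iftable method arrays, appends any needed
// miranda, default and default-conflict copies to the class's methods, and updates the vtable
// and the IMT (via `out_imt`). Interfaces themselves only get conflict methods copied since
// they have no vtable.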
// TODO This method needs to be split up into several smaller methods.
bool ClassLinker::LinkInterfaceMethods(
    Thread* self,
    Handle<mirror::Class> klass,
    const HashMap<size_t, ClassLinker::MethodTranslation>& default_translations,
    bool* out_new_conflict,
    ArtMethod** out_imt) {
  StackHandleScope<3> hs(self);
  Runtime* const runtime = Runtime::Current();

  const bool is_interface = klass->IsInterface();
  const bool has_superclass = klass->HasSuperClass();
  const bool fill_tables = !is_interface;
  const size_t super_ifcount = has_superclass ? klass->GetSuperClass()->GetIfTableCount() : 0U;
  const size_t ifcount = klass->GetIfTableCount();

  Handle<mirror::IfTable> iftable(hs.NewHandle(klass->GetIfTable()));

  MutableHandle<mirror::PointerArray> vtable(hs.NewHandle(klass->GetVTableDuringLinking()));
  ArtMethod* const unimplemented_method = runtime->GetImtUnimplementedMethod();
  ArtMethod* const imt_conflict_method = runtime->GetImtConflictMethod();
  // Copy the IMT from the super class if possible.
  const bool extend_super_iftable = has_superclass;
  if (has_superclass && fill_tables) {
    FillImtFromSuperClass(klass,
                          unimplemented_method,
                          imt_conflict_method,
                          out_new_conflict,
                          out_imt);
  }
  // Allocate method arrays beforehand since we don't want to miss visiting miranda method roots
  // due to thread suspension.
  if (fill_tables) {
    if (!AllocateIfTableMethodArrays(self, klass, iftable)) {
      return false;
    }
  }

  LinkInterfaceMethodsHelper helper(this, klass, self, runtime);

  auto* old_cause = self->StartAssertNoThreadSuspension(
      "Copying ArtMethods for LinkInterfaceMethods");
  // Going in reverse to ensure that we will hit abstract methods that override defaults before the
  // defaults. This means we don't need to do any trickery when creating the Miranda methods, since
  // they will already be null. This has the additional benefit that the declarer of a miranda
  // method will actually declare an abstract method.
  for (size_t i = ifcount; i != 0u; ) {
    --i;
    DCHECK_LT(i, ifcount);

    size_t num_methods = iftable->GetInterface(i)->NumDeclaredVirtualMethods();
    if (num_methods > 0) {
      StackHandleScope<2> hs2(self);
      const bool is_super = i < super_ifcount;
      const bool super_interface = is_super && extend_super_iftable;
      // We don't actually create or fill these tables for interfaces; we just copy some methods
      // for conflict methods. Just set this as nullptr in those cases.
      Handle<mirror::PointerArray> method_array(fill_tables
                                                ? hs2.NewHandle(iftable->GetMethodArray(i))
                                                : hs2.NewHandle<mirror::PointerArray>(nullptr));

      ArraySlice<ArtMethod> input_virtual_methods;
      ScopedNullHandle<mirror::PointerArray> null_handle;
      Handle<mirror::PointerArray> input_vtable_array(null_handle);
      int32_t input_array_length = 0;

      // TODO Cleanup Needed: In the presence of default methods this optimization is rather dirty
      //      and confusing. Default methods should always look through all the superclasses
      //      because they are the last choice of an implementation. We get around this by looking
      //      at the super-classes iftable methods (copied into method_array previously) when we are
      //      looking for the implementation of a super-interface method but that is rather dirty.
      bool using_virtuals;
      if (super_interface || is_interface) {
        // If we are overwriting a super class interface, try to use only the declared virtual
        // methods instead of the whole vtable.
        using_virtuals = true;
        input_virtual_methods = klass->GetDeclaredVirtualMethodsSlice(image_pointer_size_);
        input_array_length = input_virtual_methods.size();
      } else {
        // For a new interface, however, we need the whole vtable in case a new interface method
        // is implemented anywhere in the superclass hierarchy.
        using_virtuals = false;
        DCHECK(vtable != nullptr);
        input_vtable_array = vtable;
        input_array_length = input_vtable_array->GetLength();
      }

      // For each method in interface
      for (size_t j = 0; j < num_methods; ++j) {
        auto* interface_method = iftable->GetInterface(i)->GetVirtualMethod(j, image_pointer_size_);
        MethodNameAndSignatureComparator interface_name_comparator(
            interface_method->GetInterfaceMethodIfProxy(image_pointer_size_));
        uint32_t imt_index = interface_method->GetImtIndex();
        ArtMethod** imt_ptr = &out_imt[imt_index];
        // For each method listed in the interface's method list, find the
        // matching method in our class's method list.  We want to favor the
        // subclass over the superclass, which just requires walking
        // back from the end of the vtable.  (This only matters if the
        // superclass defines a private method and this class redefines
        // it -- otherwise it would use the same vtable slot.  In .dex files
        // those don't end up in the virtual method table, so it shouldn't
        // matter which direction we go.  We walk it backward anyway.)
        //
        // To find defaults we need to do the same but also go over interfaces.
        bool found_impl = false;
        ArtMethod* vtable_impl = nullptr;
        for (int32_t k = input_array_length - 1; k >= 0; --k) {
          ArtMethod* vtable_method = using_virtuals ?
              &input_virtual_methods[k] :
              input_vtable_array->GetElementPtrSize<ArtMethod*>(k, image_pointer_size_);
          ArtMethod* vtable_method_for_name_comparison =
              vtable_method->GetInterfaceMethodIfProxy(image_pointer_size_);
          DCHECK(!vtable_method->IsStatic()) << vtable_method->PrettyMethod();
          if (interface_name_comparator.HasSameNameAndSignature(
              vtable_method_for_name_comparison)) {
            if (!vtable_method->IsAbstract() && !vtable_method->IsPublic()) {
              // Must do EndAssertNoThreadSuspension before throw since the throw can cause
              // allocations.
              self->EndAssertNoThreadSuspension(old_cause);
              ThrowIllegalAccessError(klass.Get(),
                  "Method '%s' implementing interface method '%s' is not public",
                  vtable_method->PrettyMethod().c_str(),
                  interface_method->PrettyMethod().c_str());
              return false;
            } else if (UNLIKELY(vtable_method->IsOverridableByDefaultMethod())) {
              // We might have a newer, better, default method for this, so we just skip it. If we
              // are still using this we will select it again when scanning for default methods. To
              // obviate the need to copy the method again we will make a note that we already found
              // a default here.
              // TODO This should be much cleaner.
              vtable_impl = vtable_method;
              break;
            } else {
              found_impl = true;
              if (LIKELY(fill_tables)) {
                method_array->SetElementPtrSize(j, vtable_method, image_pointer_size_);
                // Place method in imt if entry is empty, place conflict otherwise.
                SetIMTRef(unimplemented_method,
                          imt_conflict_method,
                          vtable_method,
                          /*out*/out_new_conflict,
                          /*out*/imt_ptr);
              }
              break;
            }
          }
        }
        // Continue on to the next method if we are done.
        if (LIKELY(found_impl)) {
          continue;
        } else if (LIKELY(super_interface)) {
          // Don't look for a default implementation when the super-method is implemented directly
          // by the class.
          //
          // See if we can use the superclasses method and skip searching everything else.
          // Note: !found_impl && super_interface
          CHECK(extend_super_iftable);
          // If this is a super_interface method it is possible we shouldn't override it because a
          // superclass could have implemented it directly.  We get the method the superclass used
          // to implement this to know if we can override it with a default method. Doing this is
          // safe since we know that the super_iftable is filled in so we can simply pull it from
          // there. We don't bother if this is not a super-classes interface since in that case we
          // have scanned the entire vtable anyway and would have found it.
          // TODO This is rather dirty but it is faster than searching through the entire vtable
          //      every time.
          ArtMethod* supers_method =
              method_array->GetElementPtrSize<ArtMethod*>(j, image_pointer_size_);
          DCHECK(supers_method != nullptr);
          DCHECK(interface_name_comparator.HasSameNameAndSignature(supers_method));
          if (LIKELY(!supers_method->IsOverridableByDefaultMethod())) {
            // The method is not overridable by a default method (i.e. it is directly implemented
            // in some class). Therefore move onto the next interface method.
            continue;
          } else {
            // If the super-classes method is override-able by a default method we need to keep
            // track of it since though it is override-able it is not guaranteed to be 'overridden'.
            // If it turns out not to be overridden and we did not keep track of it we might add it
            // to the vtable twice, causing corruption (vtable entries having inconsistent and
            // illegal states, incorrect vtable size, and incorrect or inconsistent iftable entries)
            // in this class and any subclasses.
            DCHECK(vtable_impl == nullptr || vtable_impl == supers_method)
                << "vtable_impl was " << ArtMethod::PrettyMethod(vtable_impl)
                << " and not 'nullptr' or "
                << supers_method->PrettyMethod()
                << " as expected. IFTable appears to be corrupt!";
            vtable_impl = supers_method;
          }
        }
        // If we haven't found it yet we should search through the interfaces for default methods.
        ArtMethod* current_method = helper.FindMethod(interface_method,
                                                      interface_name_comparator,
                                                      vtable_impl);
        if (LIKELY(fill_tables)) {
          if (current_method == nullptr && !super_interface) {
            // We could not find an implementation for this method and since it is a brand new
            // interface we searched the entire vtable (and all default methods) for an
            // implementation but couldn't find one. We therefore need to make a miranda method.
            current_method = helper.GetOrCreateMirandaMethod(interface_method,
                                                             interface_name_comparator);
          }

          if (current_method != nullptr) {
            // We found a default method implementation. Record it in the iftable and IMT.
            method_array->SetElementPtrSize(j, current_method, image_pointer_size_);
            SetIMTRef(unimplemented_method,
                      imt_conflict_method,
                      current_method,
                      /*out*/out_new_conflict,
                      /*out*/imt_ptr);
          }
        }
      }  // For each method in interface end.
    }  // if (num_methods > 0)
  }  // For each interface.
  // TODO don't extend virtuals of interface unless necessary (when is it?).
  if (helper.HasNewVirtuals()) {
    LengthPrefixedArray<ArtMethod>* old_methods = kIsDebugBuild ? klass->GetMethodsPtr() : nullptr;
    helper.ReallocMethods();  // No return value to check. Native allocation failure aborts.
    LengthPrefixedArray<ArtMethod>* methods = kIsDebugBuild ? klass->GetMethodsPtr() : nullptr;

    // Done copying methods, they are all roots in the class now, so we can end the no thread
    // suspension assert.
    self->EndAssertNoThreadSuspension(old_cause);

    if (fill_tables) {
      vtable.Assign(helper.UpdateVtable(default_translations, vtable));
      if (UNLIKELY(vtable == nullptr)) {
        // The helper has already called self->AssertPendingOOMException();
        return false;
      }
      helper.UpdateIfTable(iftable);
      helper.UpdateIMT(out_imt);
    }

    helper.CheckNoStaleMethodsInDexCache();
    helper.ClobberOldMethods(old_methods, methods);
  } else {
    self->EndAssertNoThreadSuspension(old_cause);
  }
  if (kIsDebugBuild && !is_interface) {
    CheckVTable(self, klass, image_pointer_size_);
  }
  return true;
}

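// Helper for laying out instance or static fields and computing the resulting class size.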
class ClassLinker::LinkFieldsHelper {
 public:
  static bool LinkFields(ClassLinker* class_linker,
                         Thread* self,
                         Handle<mirror::Class> klass,
                         bool is_static,
                         size_t* class_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  enum class FieldTypeOrder : uint16_t;
  class FieldGaps;

  struct FieldTypeOrderAndIndex {
    FieldTypeOrder field_type_order;
    uint16_t field_index;
  };

  static FieldTypeOrder FieldTypeOrderFromFirstDescriptorCharacter(char first_char);

  template <size_t kSize>
  static MemberOffset AssignFieldOffset(ArtField* field, MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_);
};

// We use the following order of field types for assigning offsets.
// Some fields can be shuffled forward to fill gaps, see `ClassLinker::LinkFields()`.
enum class ClassLinker::LinkFieldsHelper::FieldTypeOrder : uint16_t {
  kReference = 0u,
  kLong,
  kDouble,
  kInt,
  kFloat,
  kChar,
  kShort,
  kBoolean,
  kByte,

  kLast64BitType = kDouble,
  kLast32BitType = kFloat,
  kLast16BitType = kShort,
};

ALWAYS_INLINE
ClassLinker::LinkFieldsHelper::FieldTypeOrder
ClassLinker::LinkFieldsHelper::FieldTypeOrderFromFirstDescriptorCharacter(char first_char) {
  switch (first_char) {
    case 'J':
      return FieldTypeOrder::kLong;
    case 'D':
      return FieldTypeOrder::kDouble;
    case 'I':
      return FieldTypeOrder::kInt;
    case 'F':
      return FieldTypeOrder::kFloat;
    case 'C':
      return FieldTypeOrder::kChar;
    case 'S':
      return FieldTypeOrder::kShort;
    case 'Z':
      return FieldTypeOrder::kBoolean;
    case 'B':
      return FieldTypeOrder::kByte;
    default:
      DCHECK(first_char == 'L' || first_char == '[') << first_char;
      return FieldTypeOrder::kReference;
  }
}

// Gaps where we can insert fields in object layout.
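// At most one pending gap of each size (1, 2 and 4 bytes) is tracked at a time; the DCHECKs in
// AddGaps() verify this invariant when new gaps are recorded.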
class ClassLinker::LinkFieldsHelper::FieldGaps {
 public:
  template <uint32_t kSize>
  ALWAYS_INLINE MemberOffset AlignFieldOffset(MemberOffset field_offset) {
    static_assert(kSize == 2u || kSize == 4u || kSize == 8u);
    if (!IsAligned<kSize>(field_offset.Uint32Value())) {
      uint32_t gap_start = field_offset.Uint32Value();
      field_offset = MemberOffset(RoundUp(gap_start, kSize));
      AddGaps<kSize - 1u>(gap_start, field_offset.Uint32Value());
    }
    return field_offset;
  }

  template <uint32_t kSize>
  bool HasGap() const {
    static_assert(kSize == 1u || kSize == 2u || kSize == 4u);
    return (kSize == 1u && gap1_offset_ != kNoOffset) ||
           (kSize <= 2u && gap2_offset_ != kNoOffset) ||
           gap4_offset_ != kNoOffset;
  }

  template <uint32_t kSize>
  MemberOffset ReleaseGap() {
    static_assert(kSize == 1u || kSize == 2u || kSize == 4u);
    uint32_t result;
    if (kSize == 1u && gap1_offset_ != kNoOffset) {
      DCHECK(gap2_offset_ == kNoOffset || gap2_offset_ > gap1_offset_);
      DCHECK(gap4_offset_ == kNoOffset || gap4_offset_ > gap1_offset_);
      result = gap1_offset_;
      gap1_offset_ = kNoOffset;
    } else if (kSize <= 2u && gap2_offset_ != kNoOffset) {
      DCHECK(gap4_offset_ == kNoOffset || gap4_offset_ > gap2_offset_);
      result = gap2_offset_;
      gap2_offset_ = kNoOffset;
      if (kSize < 2u) {
        AddGaps<1u>(result + kSize, result + 2u);
      }
    } else {
      DCHECK_NE(gap4_offset_, kNoOffset);
      result = gap4_offset_;
      gap4_offset_ = kNoOffset;
      if (kSize < 4u) {
        AddGaps<kSize | 2u>(result + kSize, result + 4u);
      }
    }
    return MemberOffset(result);
  }

 private:
  template <uint32_t kGapsToCheck>
  void AddGaps(uint32_t gap_start, uint32_t gap_end) {
    if ((kGapsToCheck & 1u) != 0u) {
      DCHECK_LT(gap_start, gap_end);
      DCHECK_ALIGNED(gap_end, 2u);
      if ((gap_start & 1u) != 0u) {
        DCHECK_EQ(gap1_offset_, kNoOffset);
        gap1_offset_ = gap_start;
        gap_start += 1u;
        if (kGapsToCheck == 1u || gap_start == gap_end) {
          DCHECK_EQ(gap_start, gap_end);
          return;
        }
      }
    }

    if ((kGapsToCheck & 2u) != 0u) {
      DCHECK_LT(gap_start, gap_end);
      DCHECK_ALIGNED(gap_start, 2u);
      DCHECK_ALIGNED(gap_end, 4u);
      if ((gap_start & 2u) != 0u) {
        DCHECK_EQ(gap2_offset_, kNoOffset);
        gap2_offset_ = gap_start;
        gap_start += 2u;
        if (kGapsToCheck <= 3u || gap_start == gap_end) {
          DCHECK_EQ(gap_start, gap_end);
          return;
        }
      }
    }

    if ((kGapsToCheck & 4u) != 0u) {
      DCHECK_LT(gap_start, gap_end);
      DCHECK_ALIGNED(gap_start, 4u);
      DCHECK_ALIGNED(gap_end, 8u);
      DCHECK_EQ(gap_start + 4u, gap_end);
      DCHECK_EQ(gap4_offset_, kNoOffset);
      gap4_offset_ = gap_start;
      return;
    }

    DCHECK(false) << "Remaining gap: " << gap_start << " to " << gap_end
        << " after checking " << kGapsToCheck;
  }

  static constexpr uint32_t kNoOffset = static_cast<uint32_t>(-1);

  uint32_t gap4_offset_ = kNoOffset;
  uint32_t gap2_offset_ = kNoOffset;
  uint32_t gap1_offset_ = kNoOffset;
};

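// Assigns `field_offset` to `field` and returns the offset immediately past it.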
template <size_t kSize>
ALWAYS_INLINE
MemberOffset ClassLinker::LinkFieldsHelper::AssignFieldOffset(ArtField* field,
                                                              MemberOffset field_offset) {
  DCHECK_ALIGNED(field_offset.Uint32Value(), kSize);
  DCHECK_EQ(Primitive::ComponentSize(field->GetTypeAsPrimitiveType()), kSize);
  field->SetOffset(field_offset);
  return MemberOffset(field_offset.Uint32Value() + kSize);
}

bool ClassLinker::LinkFieldsHelper::LinkFields(ClassLinker* class_linker,
                                               Thread* self,
                                               Handle<mirror::Class> klass,
                                               bool is_static,
                                               size_t* class_size) {
  self->AllowThreadSuspension();
  const size_t num_fields = is_static ? klass->NumStaticFields() : klass->NumInstanceFields();
  LengthPrefixedArray<ArtField>* const fields = is_static ? klass->GetSFieldsPtr() :
      klass->GetIFieldsPtr();

  // Initialize field_offset
  MemberOffset field_offset(0);
  if (is_static) {
    field_offset = klass->GetFirstReferenceStaticFieldOffsetDuringLinking(
        class_linker->GetImagePointerSize());
  } else {
    ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
    if (super_class != nullptr) {
      CHECK(super_class->IsResolved())
          << klass->PrettyClass() << " " << super_class->PrettyClass();
      field_offset = MemberOffset(super_class->GetObjectSize());
    }
  }

  CHECK_EQ(num_fields == 0, fields == nullptr) << klass->PrettyClass();

  // We want a relatively stable order so that adding new fields
  // minimizes disruption of C++ versions such as Class and Method.
  //
  // The overall sort order is:
  // 1) All object reference fields, sorted alphabetically.
  // 2) All java long (64-bit) integer fields, sorted alphabetically.
  // 3) All java double (64-bit) floating point fields, sorted alphabetically.
  // 4) All java int (32-bit) integer fields, sorted alphabetically.
  // 5) All java float (32-bit) floating point fields, sorted alphabetically.
  // 6) All java char (16-bit) integer fields, sorted alphabetically.
  // 7) All java short (16-bit) integer fields, sorted alphabetically.
  // 8) All java boolean (8-bit) integer fields, sorted alphabetically.
  // 9) All java byte (8-bit) integer fields, sorted alphabetically.
  //
  // (References are first to increase the chance of reference visiting
  // being able to take a fast path using a bitmap of references at the
  // start of the object, see `Class::reference_instance_offsets_`.)
  //
  // Once the fields are sorted in this order we will attempt to fill any gaps
  // that might be present in the memory layout of the structure.
  // Note that we shall not fill gaps between the superclass fields.
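  //
  // For example, a class declaring { Object a; byte b; int c; long d; char e; } would have its
  // fields assigned in the order a (reference), d (64-bit), c (32-bit), e (16-bit), b (8-bit),
  // with c, e or b placed into any alignment gap opened up in front of d when one fits.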
8604 
8605   // Collect fields and their "type order index" (see numbered points above).
8606   const char* old_no_suspend_cause = self->StartAssertNoThreadSuspension(
8607       "Using plain ArtField references");
8608   constexpr size_t kStackBufferEntries = 64;  // Avoid allocations for small number of fields.
8609   FieldTypeOrderAndIndex stack_buffer[kStackBufferEntries];
8610   std::vector<FieldTypeOrderAndIndex> heap_buffer;
8611   ArrayRef<FieldTypeOrderAndIndex> sorted_fields;
8612   if (num_fields <= kStackBufferEntries) {
8613     sorted_fields = ArrayRef<FieldTypeOrderAndIndex>(stack_buffer, num_fields);
8614   } else {
8615     heap_buffer.resize(num_fields);
8616     sorted_fields = ArrayRef<FieldTypeOrderAndIndex>(heap_buffer);
8617   }
8618   size_t num_reference_fields = 0;
8619   size_t primitive_fields_start = num_fields;
8620   DCHECK_LE(num_fields, 1u << 16);
8621   for (size_t i = 0; i != num_fields; ++i) {
8622     ArtField* field = &fields->At(i);
8623     const char* descriptor = field->GetTypeDescriptor();
8624     FieldTypeOrder field_type_order = FieldTypeOrderFromFirstDescriptorCharacter(descriptor[0]);
8625     uint16_t field_index = dchecked_integral_cast<uint16_t>(i);
8626     // Insert references to the start, other fields to the end.
8627     DCHECK_LT(num_reference_fields, primitive_fields_start);
8628     if (field_type_order == FieldTypeOrder::kReference) {
8629       sorted_fields[num_reference_fields] = { field_type_order, field_index };
8630       ++num_reference_fields;
8631     } else {
8632       --primitive_fields_start;
8633       sorted_fields[primitive_fields_start] = { field_type_order, field_index };
8634     }
8635   }
8636   DCHECK_EQ(num_reference_fields, primitive_fields_start);
8637 
8638   // Reference fields are already sorted by field index (and dex field index).
8639   DCHECK(std::is_sorted(
8640       sorted_fields.begin(),
8641       sorted_fields.begin() + num_reference_fields,
8642       [fields](const auto& lhs, const auto& rhs) REQUIRES_SHARED(Locks::mutator_lock_) {
8643         ArtField* lhs_field = &fields->At(lhs.field_index);
8644         ArtField* rhs_field = &fields->At(rhs.field_index);
8645         CHECK_EQ(lhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
8646         CHECK_EQ(rhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
8647         CHECK_EQ(lhs_field->GetDexFieldIndex() < rhs_field->GetDexFieldIndex(),
8648                  lhs.field_index < rhs.field_index);
8649         return lhs_field->GetDexFieldIndex() < rhs_field->GetDexFieldIndex();
8650       }));
8651   // Primitive fields were stored in reverse order of their field index (and dex field index).
8652   DCHECK(std::is_sorted(
8653       sorted_fields.begin() + primitive_fields_start,
8654       sorted_fields.end(),
8655       [fields](const auto& lhs, const auto& rhs) REQUIRES_SHARED(Locks::mutator_lock_) {
8656         ArtField* lhs_field = &fields->At(lhs.field_index);
8657         ArtField* rhs_field = &fields->At(rhs.field_index);
8658         CHECK_NE(lhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
8659         CHECK_NE(rhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
8660         CHECK_EQ(lhs_field->GetDexFieldIndex() > rhs_field->GetDexFieldIndex(),
8661                  lhs.field_index > rhs.field_index);
8662         return lhs.field_index > rhs.field_index;
8663       }));
8664   // Sort the primitive fields by the field type order, then field index.
8665   std::sort(sorted_fields.begin() + primitive_fields_start,
8666             sorted_fields.end(),
8667             [](const auto& lhs, const auto& rhs) {
8668               if (lhs.field_type_order != rhs.field_type_order) {
8669                 return lhs.field_type_order < rhs.field_type_order;
8670               } else {
8671                 return lhs.field_index < rhs.field_index;
8672               }
8673             });
8674   // Primitive fields are now sorted by field size (descending), then type, then field index.
8675   DCHECK(std::is_sorted(
8676       sorted_fields.begin() + primitive_fields_start,
8677       sorted_fields.end(),
8678       [fields](const auto& lhs, const auto& rhs) REQUIRES_SHARED(Locks::mutator_lock_) {
8679         ArtField* lhs_field = &fields->At(lhs.field_index);
8680         ArtField* rhs_field = &fields->At(rhs.field_index);
8681         Primitive::Type lhs_type = lhs_field->GetTypeAsPrimitiveType();
8682         CHECK_NE(lhs_type, Primitive::kPrimNot);
8683         Primitive::Type rhs_type = rhs_field->GetTypeAsPrimitiveType();
8684         CHECK_NE(rhs_type, Primitive::kPrimNot);
8685         if (lhs_type != rhs_type) {
8686           size_t lhs_size = Primitive::ComponentSize(lhs_type);
8687           size_t rhs_size = Primitive::ComponentSize(rhs_type);
8688           return (lhs_size != rhs_size) ? (lhs_size > rhs_size) : (lhs_type < rhs_type);
8689         } else {
8690           return lhs_field->GetDexFieldIndex() < rhs_field->GetDexFieldIndex();
8691         }
8692       }));
8693 
8694   // Process reference fields.
8695   FieldGaps field_gaps;
8696   size_t index = 0u;
8697   if (num_reference_fields != 0u) {
8698     constexpr size_t kReferenceSize = sizeof(mirror::HeapReference<mirror::Object>);
8699     field_offset = field_gaps.AlignFieldOffset<kReferenceSize>(field_offset);
8700     for (; index != num_reference_fields; ++index) {
8701       ArtField* field = &fields->At(sorted_fields[index].field_index);
8702       field_offset = AssignFieldOffset<kReferenceSize>(field, field_offset);
8703     }
8704   }
8705   // Process 64-bit fields.
8706   if (index != num_fields &&
8707       sorted_fields[index].field_type_order <= FieldTypeOrder::kLast64BitType) {
8708     field_offset = field_gaps.AlignFieldOffset<8u>(field_offset);
8709     while (index != num_fields &&
8710            sorted_fields[index].field_type_order <= FieldTypeOrder::kLast64BitType) {
8711       ArtField* field = &fields->At(sorted_fields[index].field_index);
8712       field_offset = AssignFieldOffset<8u>(field, field_offset);
8713       ++index;
8714     }
8715   }
8716   // Process 32-bit fields.
8717   if (index != num_fields &&
8718       sorted_fields[index].field_type_order <= FieldTypeOrder::kLast32BitType) {
8719     field_offset = field_gaps.AlignFieldOffset<4u>(field_offset);
8720     if (field_gaps.HasGap<4u>()) {
8721       ArtField* field = &fields->At(sorted_fields[index].field_index);
8722       AssignFieldOffset<4u>(field, field_gaps.ReleaseGap<4u>());  // Ignore return value.
8723       ++index;
8724       DCHECK(!field_gaps.HasGap<4u>());  // There can be only one gap for a 32-bit field.
8725     }
8726     while (index != num_fields &&
8727            sorted_fields[index].field_type_order <= FieldTypeOrder::kLast32BitType) {
8728       ArtField* field = &fields->At(sorted_fields[index].field_index);
8729       field_offset = AssignFieldOffset<4u>(field, field_offset);
8730       ++index;
8731     }
8732   }
8733   // Process 16-bit fields.
8734   if (index != num_fields &&
8735       sorted_fields[index].field_type_order <= FieldTypeOrder::kLast16BitType) {
8736     field_offset = field_gaps.AlignFieldOffset<2u>(field_offset);
8737     while (index != num_fields &&
8738            sorted_fields[index].field_type_order <= FieldTypeOrder::kLast16BitType &&
8739            field_gaps.HasGap<2u>()) {
8740       ArtField* field = &fields->At(sorted_fields[index].field_index);
8741       AssignFieldOffset<2u>(field, field_gaps.ReleaseGap<2u>());  // Ignore return value.
8742       ++index;
8743     }
8744     while (index != num_fields &&
8745            sorted_fields[index].field_type_order <= FieldTypeOrder::kLast16BitType) {
8746       ArtField* field = &fields->At(sorted_fields[index].field_index);
8747       field_offset = AssignFieldOffset<2u>(field, field_offset);
8748       ++index;
8749     }
8750   }
8751   // Process 8-bit fields.
8752   for (; index != num_fields && field_gaps.HasGap<1u>(); ++index) {
8753     ArtField* field = &fields->At(sorted_fields[index].field_index);
8754     AssignFieldOffset<1u>(field, field_gaps.ReleaseGap<1u>());  // Ignore return value.
8755   }
8756   for (; index != num_fields; ++index) {
8757     ArtField* field = &fields->At(sorted_fields[index].field_index);
8758     field_offset = AssignFieldOffset<1u>(field, field_offset);
8759   }
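  // Worked example of the gap filling above (illustrative offsets): if field_offset is 12
  // when the first 64-bit field is reached, AlignFieldOffset<8u>() records a 4-byte gap at
  // offset 12 and the field is placed at 16; a later 32-bit field then takes offset 12 via
  // ReleaseGap<4u>() instead of growing the object.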
8760 
8761   self->EndAssertNoThreadSuspension(old_no_suspend_cause);
8762 
8763   // We lie to the GC about the java.lang.ref.Reference.referent field, so it doesn't scan it.
8764   DCHECK(!class_linker->init_done_ || !klass->DescriptorEquals("Ljava/lang/ref/Reference;"));
8765   if (!is_static &&
8766       UNLIKELY(!class_linker->init_done_) &&
8767       klass->DescriptorEquals("Ljava/lang/ref/Reference;")) {
8768     // We know there are no non-reference fields in the Reference classes, and we know
8769     // that 'referent' is alphabetically last, so this is easy...
8770     CHECK_EQ(num_reference_fields, num_fields) << klass->PrettyClass();
8771     CHECK_STREQ(fields->At(num_fields - 1).GetName(), "referent")
8772         << klass->PrettyClass();
8773     --num_reference_fields;
8774   }
8775 
8776   size_t size = field_offset.Uint32Value();
8777   // Update klass
8778   if (is_static) {
8779     klass->SetNumReferenceStaticFields(num_reference_fields);
8780     *class_size = size;
8781   } else {
8782     klass->SetNumReferenceInstanceFields(num_reference_fields);
8783     ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
8784     if (num_reference_fields == 0 || super_class == nullptr) {
8785       // java.lang.Object has one reference field (klass), but we ignore it since we always visit the class.
8786       // super_class is null iff the class is java.lang.Object.
8787       if (super_class == nullptr ||
8788           (super_class->GetClassFlags() & mirror::kClassFlagNoReferenceFields) != 0) {
8789         klass->SetClassFlags(klass->GetClassFlags() | mirror::kClassFlagNoReferenceFields);
8790       }
8791     }
8792     if (kIsDebugBuild) {
8793       DCHECK_EQ(super_class == nullptr, klass->DescriptorEquals("Ljava/lang/Object;"));
8794       size_t total_reference_instance_fields = 0;
8795       ObjPtr<mirror::Class> cur_super = klass.Get();
8796       while (cur_super != nullptr) {
8797         total_reference_instance_fields += cur_super->NumReferenceInstanceFieldsDuringLinking();
8798         cur_super = cur_super->GetSuperClass();
8799       }
8800       if (super_class == nullptr) {
8801         CHECK_EQ(total_reference_instance_fields, 1u) << klass->PrettyDescriptor();
8802       } else {
8803         // Check that there are at least num_reference_fields in addition to the one from java.lang.Object.
8804         CHECK_GE(total_reference_instance_fields, 1u + num_reference_fields)
8805             << klass->PrettyClass();
8806       }
8807     }
8808     if (!klass->IsVariableSize()) {
8809       std::string temp;
8810       DCHECK_GE(size, sizeof(mirror::Object)) << klass->GetDescriptor(&temp);
8811       size_t previous_size = klass->GetObjectSize();
8812       if (previous_size != 0) {
8813         // Make sure that we didn't originally have an incorrect size.
8814         CHECK_EQ(previous_size, size) << klass->GetDescriptor(&temp);
8815       }
8816       klass->SetObjectSize(size);
8817     }
8818   }
8819 
8820   if (kIsDebugBuild) {
8821     // Make sure that the fields array is ordered by name but all reference
8822     // offsets are at the beginning as far as alignment allows.
8823     MemberOffset start_ref_offset = is_static
8824         ? klass->GetFirstReferenceStaticFieldOffsetDuringLinking(class_linker->image_pointer_size_)
8825         : klass->GetFirstReferenceInstanceFieldOffset();
8826     MemberOffset end_ref_offset(start_ref_offset.Uint32Value() +
8827                                 num_reference_fields *
8828                                     sizeof(mirror::HeapReference<mirror::Object>));
8829     MemberOffset current_ref_offset = start_ref_offset;
8830     for (size_t i = 0; i < num_fields; i++) {
8831       ArtField* field = &fields->At(i);
8832       VLOG(class_linker) << "LinkFields: " << (is_static ? "static" : "instance")
8833           << " class=" << klass->PrettyClass() << " field=" << field->PrettyField()
8834           << " offset=" << field->GetOffsetDuringLinking();
8835       if (i != 0) {
8836         ArtField* const prev_field = &fields->At(i - 1);
8837         // NOTE: The field names can be the same. This is not possible in the Java language,
8838         // but it is valid dex bytecode and, for example, ProGuard can generate such bytecode.
8839         DCHECK_LE(strcmp(prev_field->GetName(), field->GetName()), 0);
8840       }
8841       Primitive::Type type = field->GetTypeAsPrimitiveType();
8842       bool is_primitive = type != Primitive::kPrimNot;
8843       if (klass->DescriptorEquals("Ljava/lang/ref/Reference;") &&
8844           strcmp("referent", field->GetName()) == 0) {
8845         is_primitive = true;  // We lied above, so we have to expect a lie here.
8846       }
8847       MemberOffset offset = field->GetOffsetDuringLinking();
8848       if (is_primitive) {
8849         if (offset.Uint32Value() < end_ref_offset.Uint32Value()) {
8850           // Shuffled before references.
8851           size_t type_size = Primitive::ComponentSize(type);
8852           CHECK_LT(type_size, sizeof(mirror::HeapReference<mirror::Object>));
8853           CHECK_LT(offset.Uint32Value(), start_ref_offset.Uint32Value());
8854           CHECK_LE(offset.Uint32Value() + type_size, start_ref_offset.Uint32Value());
8855           CHECK(!IsAligned<sizeof(mirror::HeapReference<mirror::Object>)>(offset.Uint32Value()));
8856         }
8857       } else {
8858         CHECK_EQ(current_ref_offset.Uint32Value(), offset.Uint32Value());
8859         current_ref_offset = MemberOffset(current_ref_offset.Uint32Value() +
8860                                           sizeof(mirror::HeapReference<mirror::Object>));
8861       }
8862     }
8863     CHECK_EQ(current_ref_offset.Uint32Value(), end_ref_offset.Uint32Value());
8864   }
8865   return true;
8866 }
8867 
8868 bool ClassLinker::LinkInstanceFields(Thread* self, Handle<mirror::Class> klass) {
8869   CHECK(klass != nullptr);
8870   return LinkFieldsHelper::LinkFields(this, self, klass, false, nullptr);
8871 }
8872 
8873 bool ClassLinker::LinkStaticFields(Thread* self, Handle<mirror::Class> klass, size_t* class_size) {
8874   CHECK(klass != nullptr);
8875   return LinkFieldsHelper::LinkFields(this, self, klass, true, class_size);
8876 }
8877 
8878 //  Set the bitmap of reference instance field offsets.
8879 void ClassLinker::CreateReferenceInstanceOffsets(Handle<mirror::Class> klass) {
8880   uint32_t reference_offsets = 0;
8881   ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
8882   // Leave the reference offsets as 0 for mirror::Object (the class field is handled specially).
8883   if (super_class != nullptr) {
8884     reference_offsets = super_class->GetReferenceInstanceOffsets();
8885     // Compute reference offsets unless our superclass overflowed.
8886     if (reference_offsets != mirror::Class::kClassWalkSuper) {
8887       size_t num_reference_fields = klass->NumReferenceInstanceFieldsDuringLinking();
8888       if (num_reference_fields != 0u) {
8889         // All of the fields that contain object references are guaranteed to be grouped in memory,
8890         // starting at an appropriately aligned address after the super class object data.
8891         uint32_t start_offset = RoundUp(super_class->GetObjectSize(),
8892                                         sizeof(mirror::HeapReference<mirror::Object>));
8893         uint32_t start_bit = (start_offset - mirror::kObjectHeaderSize) /
8894             sizeof(mirror::HeapReference<mirror::Object>);
8895         if (start_bit + num_reference_fields > 32) {
8896           reference_offsets = mirror::Class::kClassWalkSuper;
8897         } else {
8898           reference_offsets |= (0xffffffffu << start_bit) &
8899                                (0xffffffffu >> (32 - (start_bit + num_reference_fields)));
8900         }
8901       }
8902     }
8903   }
8904   klass->SetReferenceInstanceOffsets(reference_offsets);
8905 }
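// Worked example for the bitmap above (illustrative, assuming 4-byte heap references and an
// 8-byte object header): with a 16-byte super class object size and 3 reference fields,
// start_bit = (16 - 8) / 4 = 2 and bits 2..4 are set in reference_offsets. If
// start_bit + num_reference_fields exceeded 32, the value would instead be
// mirror::Class::kClassWalkSuper and readers would fall back to walking the class hierarchy.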
8906 
8907 ObjPtr<mirror::String> ClassLinker::DoResolveString(dex::StringIndex string_idx,
8908                                                     ObjPtr<mirror::DexCache> dex_cache) {
8909   StackHandleScope<1> hs(Thread::Current());
8910   Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(dex_cache));
8911   return DoResolveString(string_idx, h_dex_cache);
8912 }
8913 
8914 ObjPtr<mirror::String> ClassLinker::DoResolveString(dex::StringIndex string_idx,
8915                                                     Handle<mirror::DexCache> dex_cache) {
8916   const DexFile& dex_file = *dex_cache->GetDexFile();
8917   uint32_t utf16_length;
8918   const char* utf8_data = dex_file.StringDataAndUtf16LengthByIdx(string_idx, &utf16_length);
8919   ObjPtr<mirror::String> string = intern_table_->InternStrong(utf16_length, utf8_data);
8920   if (string != nullptr) {
8921     dex_cache->SetResolvedString(string_idx, string);
8922   }
8923   return string;
8924 }
8925 
8926 ObjPtr<mirror::String> ClassLinker::DoLookupString(dex::StringIndex string_idx,
8927                                                    ObjPtr<mirror::DexCache> dex_cache) {
8928   DCHECK(dex_cache != nullptr);
8929   const DexFile& dex_file = *dex_cache->GetDexFile();
8930   uint32_t utf16_length;
8931   const char* utf8_data = dex_file.StringDataAndUtf16LengthByIdx(string_idx, &utf16_length);
8932   ObjPtr<mirror::String> string =
8933       intern_table_->LookupStrong(Thread::Current(), utf16_length, utf8_data);
8934   if (string != nullptr) {
8935     dex_cache->SetResolvedString(string_idx, string);
8936   }
8937   return string;
8938 }
8939 
8940 ObjPtr<mirror::Class> ClassLinker::DoLookupResolvedType(dex::TypeIndex type_idx,
8941                                                         ObjPtr<mirror::Class> referrer) {
8942   return DoLookupResolvedType(type_idx, referrer->GetDexCache(), referrer->GetClassLoader());
8943 }
8944 
8945 ObjPtr<mirror::Class> ClassLinker::DoLookupResolvedType(dex::TypeIndex type_idx,
8946                                                         ObjPtr<mirror::DexCache> dex_cache,
8947                                                         ObjPtr<mirror::ClassLoader> class_loader) {
8948   const DexFile& dex_file = *dex_cache->GetDexFile();
8949   const char* descriptor = dex_file.StringByTypeIdx(type_idx);
8950   ObjPtr<mirror::Class> type = LookupResolvedType(descriptor, class_loader);
8951   if (type != nullptr) {
8952     DCHECK(type->IsResolved());
8953     dex_cache->SetResolvedType(type_idx, type);
8954   }
8955   return type;
8956 }
8957 
8958 ObjPtr<mirror::Class> ClassLinker::LookupResolvedType(const char* descriptor,
8959                                                       ObjPtr<mirror::ClassLoader> class_loader) {
8960   DCHECK_NE(*descriptor, '\0') << "descriptor is empty string";
8961   ObjPtr<mirror::Class> type = nullptr;
8962   if (descriptor[1] == '\0') {
8963     // Only the descriptors of primitive types should be 1 character long; this also avoids class
8964     // lookup for primitive classes that aren't backed by dex files.
8965     type = LookupPrimitiveClass(descriptor[0]);
8966   } else {
8967     Thread* const self = Thread::Current();
8968     DCHECK(self != nullptr);
8969     const size_t hash = ComputeModifiedUtf8Hash(descriptor);
8970     // Find the class in the loaded classes table.
8971     type = LookupClass(self, descriptor, hash, class_loader);
8972   }
8973   return (type != nullptr && type->IsResolved()) ? type : nullptr;
8974 }
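// For reference, the single-character primitive descriptors handled above are 'Z' (boolean),
// 'B' (byte), 'C' (char), 'S' (short), 'I' (int), 'J' (long), 'F' (float), 'D' (double) and
// 'V' (void); multi-character descriptors such as "Ljava/lang/String;" go through the
// class-table lookup instead.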
8975 
8976 template <typename RefType>
8977 ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx, RefType referrer) {
8978   StackHandleScope<2> hs(Thread::Current());
8979   Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
8980   Handle<mirror::ClassLoader> class_loader(hs.NewHandle(referrer->GetClassLoader()));
8981   return DoResolveType(type_idx, dex_cache, class_loader);
8982 }
8983 
8984 // Instantiate the above.
8985 template ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
8986                                                           ArtField* referrer);
8987 template ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
8988                                                           ArtMethod* referrer);
8989 template ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
8990                                                           ObjPtr<mirror::Class> referrer);
8991 
8992 ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
8993                                                  Handle<mirror::DexCache> dex_cache,
8994                                                  Handle<mirror::ClassLoader> class_loader) {
8995   Thread* self = Thread::Current();
8996   const char* descriptor = dex_cache->GetDexFile()->StringByTypeIdx(type_idx);
8997   ObjPtr<mirror::Class> resolved = FindClass(self, descriptor, class_loader);
8998   if (resolved != nullptr) {
8999     // TODO: we used to throw here if resolved's class loader was not the
9000     //       boot class loader. This was to permit different classes with the
9001     //       same name to be loaded simultaneously by different loaders.
9002     dex_cache->SetResolvedType(type_idx, resolved);
9003   } else {
9004     CHECK(self->IsExceptionPending())
9005         << "Expected pending exception for failed resolution of: " << descriptor;
9006     // Convert a ClassNotFoundException to a NoClassDefFoundError.
9007     StackHandleScope<1> hs(self);
9008     Handle<mirror::Throwable> cause(hs.NewHandle(self->GetException()));
9009     if (cause->InstanceOf(GetClassRoot(ClassRoot::kJavaLangClassNotFoundException, this))) {
9010       DCHECK(resolved == nullptr);  // No Handle needed to preserve resolved.
9011       self->ClearException();
9012       ThrowNoClassDefFoundError("Failed resolution of: %s", descriptor);
9013       self->GetException()->SetCause(cause.Get());
9014     }
9015   }
9016   DCHECK((resolved == nullptr) || resolved->IsResolved())
9017       << resolved->PrettyDescriptor() << " " << resolved->GetStatus();
9018   return resolved;
9019 }
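// Example of the conversion above (illustrative class name): if FindClass() throws
// ClassNotFoundException for "Lcom/example/Missing;", the exception is cleared and replaced by
// a NoClassDefFoundError("Failed resolution of: Lcom/example/Missing;") with the original
// ClassNotFoundException attached as its cause.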
9020 
9021 ArtMethod* ClassLinker::FindResolvedMethod(ObjPtr<mirror::Class> klass,
9022                                            ObjPtr<mirror::DexCache> dex_cache,
9023                                            ObjPtr<mirror::ClassLoader> class_loader,
9024                                            uint32_t method_idx) {
9025   // Search for the method using dex_cache and method_idx. The Class::Find*Method()
9026   // functions can optimize the search if the dex_cache is the same as the DexCache
9027   // of the class, with fall-back to name and signature search otherwise.
9028   ArtMethod* resolved = nullptr;
9029   if (klass->IsInterface()) {
9030     resolved = klass->FindInterfaceMethod(dex_cache, method_idx, image_pointer_size_);
9031   } else {
9032     resolved = klass->FindClassMethod(dex_cache, method_idx, image_pointer_size_);
9033   }
9034   DCHECK(resolved == nullptr || resolved->GetDeclaringClassUnchecked() != nullptr);
9035   if (resolved != nullptr &&
9036       // We pass AccessMethod::kNone instead of kLinking to not warn yet on the
9037       // access, as we'll be checking whether the method can be accessed through an
9038       // interface.
9039       hiddenapi::ShouldDenyAccessToMember(resolved,
9040                                           hiddenapi::AccessContext(class_loader, dex_cache),
9041                                           hiddenapi::AccessMethod::kNone)) {
9042     // The resolved method that we have found cannot be accessed due to
9043     // hiddenapi (typically it is declared up the hierarchy and is not an SDK
9044     // method). Try to find an interface method from the implemented interfaces which is
9045     // part of the SDK.
9046     ArtMethod* itf_method = klass->FindAccessibleInterfaceMethod(resolved, image_pointer_size_);
9047     if (itf_method == nullptr) {
9048       // No interface method. Call ShouldDenyAccessToMember again but this time
9049       // with AccessMethod::kLinking to ensure that an appropriate warning is
9050       // logged.
9051       hiddenapi::ShouldDenyAccessToMember(resolved,
9052                                           hiddenapi::AccessContext(class_loader, dex_cache),
9053                                           hiddenapi::AccessMethod::kLinking);
9054       resolved = nullptr;
9055     } else {
9056       // We found an interface method that is accessible, continue with the resolved method.
9057     }
9058   }
9059   if (resolved != nullptr) {
9060     // In case of jvmti, the dex file gets verified before being registered, so first
9061     // check if it's registered before checking class tables.
9062     const DexFile& dex_file = *dex_cache->GetDexFile();
9063     DCHECK(!IsDexFileRegistered(Thread::Current(), dex_file) ||
9064            FindClassTable(Thread::Current(), dex_cache) == ClassTableForClassLoader(class_loader))
9065         << "DexFile referrer: " << dex_file.GetLocation()
9066         << " ClassLoader: " << DescribeLoaders(class_loader, "");
9067     // Be a good citizen and update the dex cache to speed subsequent calls.
9068     dex_cache->SetResolvedMethod(method_idx, resolved);
9069     // Disable the following invariant check as the verifier breaks it. b/73760543
9070     // const DexFile::MethodId& method_id = dex_file.GetMethodId(method_idx);
9071     // DCHECK(LookupResolvedType(method_id.class_idx_, dex_cache, class_loader) != nullptr)
9072     //    << "Method: " << resolved->PrettyMethod() << ", "
9073     //    << "Class: " << klass->PrettyClass() << " (" << klass->GetStatus() << "), "
9074     //    << "DexFile referrer: " << dex_file.GetLocation();
9075   }
9076   return resolved;
9077 }
9078 
9079 // Returns true if `method` is either null or hidden.
9080 // Does not print any warnings if it is hidden.
9081 static bool CheckNoSuchMethod(ArtMethod* method,
9082                               ObjPtr<mirror::DexCache> dex_cache,
9083                               ObjPtr<mirror::ClassLoader> class_loader)
9084       REQUIRES_SHARED(Locks::mutator_lock_) {
9085   return method == nullptr ||
9086          hiddenapi::ShouldDenyAccessToMember(method,
9087                                              hiddenapi::AccessContext(class_loader, dex_cache),
9088                                              hiddenapi::AccessMethod::kNone);  // no warnings
9089 }
9090 
9091 ArtMethod* ClassLinker::FindIncompatibleMethod(ObjPtr<mirror::Class> klass,
9092                                                ObjPtr<mirror::DexCache> dex_cache,
9093                                                ObjPtr<mirror::ClassLoader> class_loader,
9094                                                uint32_t method_idx) {
9095   if (klass->IsInterface()) {
9096     ArtMethod* method = klass->FindClassMethod(dex_cache, method_idx, image_pointer_size_);
9097     return CheckNoSuchMethod(method, dex_cache, class_loader) ? nullptr : method;
9098   } else {
9099     // If there was an interface method with the same signature, we would have
9100     // found it in the "copied" methods. Only DCHECK that the interface method
9101     // really does not exist.
9102     if (kIsDebugBuild) {
9103       ArtMethod* method =
9104           klass->FindInterfaceMethod(dex_cache, method_idx, image_pointer_size_);
9105       DCHECK(CheckNoSuchMethod(method, dex_cache, class_loader));
9106     }
9107     return nullptr;
9108   }
9109 }
9110 
9111 template <ClassLinker::ResolveMode kResolveMode>
9112 ArtMethod* ClassLinker::ResolveMethod(uint32_t method_idx,
9113                                       Handle<mirror::DexCache> dex_cache,
9114                                       Handle<mirror::ClassLoader> class_loader,
9115                                       ArtMethod* referrer,
9116                                       InvokeType type) {
9117   DCHECK(!Thread::Current()->IsExceptionPending()) << Thread::Current()->GetException()->Dump();
9118   DCHECK(dex_cache != nullptr);
9119   DCHECK(referrer == nullptr || !referrer->IsProxyMethod());
9120   // Check for hit in the dex cache.
9121   ArtMethod* resolved = dex_cache->GetResolvedMethod(method_idx);
9122   Thread::PoisonObjectPointersIfDebug();
9123   DCHECK(resolved == nullptr || !resolved->IsRuntimeMethod());
9124   bool valid_dex_cache_method = resolved != nullptr;
9125   if (kResolveMode == ResolveMode::kNoChecks && valid_dex_cache_method) {
9126     // We have a valid method from the DexCache and no checks to perform.
9127     DCHECK(resolved->GetDeclaringClassUnchecked() != nullptr) << resolved->GetDexMethodIndex();
9128     return resolved;
9129   }
9130   const DexFile& dex_file = *dex_cache->GetDexFile();
9131   const dex::MethodId& method_id = dex_file.GetMethodId(method_idx);
9132   ObjPtr<mirror::Class> klass = nullptr;
9133   if (valid_dex_cache_method) {
9134     // We have a valid method from the DexCache but we need to perform ICCE and IAE checks.
9135     DCHECK(resolved->GetDeclaringClassUnchecked() != nullptr) << resolved->GetDexMethodIndex();
9136     klass = LookupResolvedType(method_id.class_idx_, dex_cache.Get(), class_loader.Get());
9137     if (UNLIKELY(klass == nullptr)) {
9138       // We normally should not end up here. However, the verifier currently doesn't guarantee
9139       // the invariant of having the klass in the class table. b/73760543
9140       klass = ResolveType(method_id.class_idx_, dex_cache, class_loader);
9141       if (klass == nullptr) {
9142         // This can only happen if the current thread is not allowed to load
9143         // classes.
9144         DCHECK(!Thread::Current()->CanLoadClasses());
9145         DCHECK(Thread::Current()->IsExceptionPending());
9146         return nullptr;
9147       }
9148     }
9149   } else {
9150     // The method was not in the DexCache, resolve the declaring class.
9151     klass = ResolveType(method_id.class_idx_, dex_cache, class_loader);
9152     if (klass == nullptr) {
9153       DCHECK(Thread::Current()->IsExceptionPending());
9154       return nullptr;
9155     }
9156   }
9157 
9158   // Check if the invoke type matches the class type.
9159   if (kResolveMode == ResolveMode::kCheckICCEAndIAE &&
9160       CheckInvokeClassMismatch</* kThrow= */ true>(
9161           dex_cache.Get(), type, [klass]() { return klass; })) {
9162     DCHECK(Thread::Current()->IsExceptionPending());
9163     return nullptr;
9164   }
9165 
9166   if (!valid_dex_cache_method) {
9167     resolved = FindResolvedMethod(klass, dex_cache.Get(), class_loader.Get(), method_idx);
9168   }
9169 
9170   // Note: We can check for IllegalAccessError only if we have a referrer.
9171   if (kResolveMode == ResolveMode::kCheckICCEAndIAE && resolved != nullptr && referrer != nullptr) {
9172     ObjPtr<mirror::Class> methods_class = resolved->GetDeclaringClass();
9173     ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
9174     if (!referring_class->CheckResolvedMethodAccess(methods_class,
9175                                                     resolved,
9176                                                     dex_cache.Get(),
9177                                                     method_idx,
9178                                                     type)) {
9179       DCHECK(Thread::Current()->IsExceptionPending());
9180       return nullptr;
9181     }
9182   }
9183 
9184   // If we found a method, check for incompatible class changes.
9185   if (LIKELY(resolved != nullptr) &&
9186       LIKELY(kResolveMode == ResolveMode::kNoChecks ||
9187              !resolved->CheckIncompatibleClassChange(type))) {
9188     return resolved;
9189   } else {
9190     // If we had a method, or if we can find one with another lookup type,
9191     // it's an incompatible-class-change error.
9192     if (resolved == nullptr) {
9193       resolved = FindIncompatibleMethod(klass, dex_cache.Get(), class_loader.Get(), method_idx);
9194     }
9195     if (resolved != nullptr) {
9196       ThrowIncompatibleClassChangeError(type, resolved->GetInvokeType(), resolved, referrer);
9197     } else {
9198       // We failed to find the method (using all lookup types), so throw a NoSuchMethodError.
9199       const char* name = dex_file.StringDataByIdx(method_id.name_idx_);
9200       const Signature signature = dex_file.GetMethodSignature(method_id);
9201       ThrowNoSuchMethodError(type, klass, name, signature);
9202     }
9203     Thread::Current()->AssertPendingException();
9204     return nullptr;
9205   }
9206 }
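// Summary of the checked path above (illustrative): with kCheckICCEAndIAE, an invoke type that
// does not match the resolved method (e.g. invoke-virtual targeting a static method) ends in
// ThrowIncompatibleClassChangeError(), an inaccessible method fails in
// CheckResolvedMethodAccess() with IllegalAccessError, and a method not found by any lookup
// type ends in ThrowNoSuchMethodError().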
9207 
9208 ArtMethod* ClassLinker::ResolveMethodWithoutInvokeType(uint32_t method_idx,
9209                                                        Handle<mirror::DexCache> dex_cache,
9210                                                        Handle<mirror::ClassLoader> class_loader) {
9211   ArtMethod* resolved = dex_cache->GetResolvedMethod(method_idx);
9212   Thread::PoisonObjectPointersIfDebug();
9213   if (resolved != nullptr) {
9214     DCHECK(!resolved->IsRuntimeMethod());
9215     DCHECK(resolved->GetDeclaringClassUnchecked() != nullptr) << resolved->GetDexMethodIndex();
9216     return resolved;
9217   }
9218   // Not found in the dex cache; resolve the declaring class.
9219   const dex::MethodId& method_id = dex_cache->GetDexFile()->GetMethodId(method_idx);
9220   ObjPtr<mirror::Class> klass = ResolveType(method_id.class_idx_, dex_cache, class_loader);
9221   if (klass == nullptr) {
9222     Thread::Current()->AssertPendingException();
9223     return nullptr;
9224   }
9225   if (klass->IsInterface()) {
9226     resolved = klass->FindInterfaceMethod(dex_cache.Get(), method_idx, image_pointer_size_);
9227   } else {
9228     resolved = klass->FindClassMethod(dex_cache.Get(), method_idx, image_pointer_size_);
9229   }
9230   if (resolved != nullptr &&
9231       hiddenapi::ShouldDenyAccessToMember(
9232           resolved,
9233           hiddenapi::AccessContext(class_loader.Get(), dex_cache.Get()),
9234           hiddenapi::AccessMethod::kLinking)) {
9235     resolved = nullptr;
9236   }
9237   return resolved;
9238 }
9239 
9240 ArtField* ClassLinker::LookupResolvedField(uint32_t field_idx,
9241                                            ObjPtr<mirror::DexCache> dex_cache,
9242                                            ObjPtr<mirror::ClassLoader> class_loader,
9243                                            bool is_static) {
9244   const DexFile& dex_file = *dex_cache->GetDexFile();
9245   const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
9246   ObjPtr<mirror::Class> klass = dex_cache->GetResolvedType(field_id.class_idx_);
9247   if (klass == nullptr) {
9248     klass = LookupResolvedType(field_id.class_idx_, dex_cache, class_loader);
9249   }
9250   if (klass == nullptr) {
9251     // The class has not been resolved yet, so the field is also unresolved.
9252     return nullptr;
9253   }
9254   DCHECK(klass->IsResolved());
9255 
9256   return FindResolvedField(klass, dex_cache, class_loader, field_idx, is_static);
9257 }
9258 
9259 ArtField* ClassLinker::ResolveField(uint32_t field_idx,
9260                                     Handle<mirror::DexCache> dex_cache,
9261                                     Handle<mirror::ClassLoader> class_loader,
9262                                     bool is_static) {
9263   DCHECK(dex_cache != nullptr);
9264   DCHECK(!Thread::Current()->IsExceptionPending()) << Thread::Current()->GetException()->Dump();
9265   ArtField* resolved = dex_cache->GetResolvedField(field_idx);
9266   Thread::PoisonObjectPointersIfDebug();
9267   if (resolved != nullptr) {
9268     return resolved;
9269   }
9270   const DexFile& dex_file = *dex_cache->GetDexFile();
9271   const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
9272   ObjPtr<mirror::Class> klass = ResolveType(field_id.class_idx_, dex_cache, class_loader);
9273   if (klass == nullptr) {
9274     DCHECK(Thread::Current()->IsExceptionPending());
9275     return nullptr;
9276   }
9277 
9278   resolved = FindResolvedField(klass, dex_cache.Get(), class_loader.Get(), field_idx, is_static);
9279   if (resolved == nullptr) {
9280     const char* name = dex_file.GetFieldName(field_id);
9281     const char* type = dex_file.GetFieldTypeDescriptor(field_id);
9282     ThrowNoSuchFieldError(is_static ? "static " : "instance ", klass, type, name);
9283   }
9284   return resolved;
9285 }
9286 
9287 ArtField* ClassLinker::ResolveFieldJLS(uint32_t field_idx,
9288                                        Handle<mirror::DexCache> dex_cache,
9289                                        Handle<mirror::ClassLoader> class_loader) {
9290   DCHECK(dex_cache != nullptr);
9291   ArtField* resolved = dex_cache->GetResolvedField(field_idx);
9292   Thread::PoisonObjectPointersIfDebug();
9293   if (resolved != nullptr) {
9294     return resolved;
9295   }
9296   const DexFile& dex_file = *dex_cache->GetDexFile();
9297   const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
9298   ObjPtr<mirror::Class> klass = ResolveType(field_id.class_idx_, dex_cache, class_loader);
9299   if (klass == nullptr) {
9300     DCHECK(Thread::Current()->IsExceptionPending());
9301     return nullptr;
9302   }
9303 
9304   resolved = FindResolvedFieldJLS(klass, dex_cache.Get(), class_loader.Get(), field_idx);
9305   if (resolved == nullptr) {
9306     const char* name = dex_file.GetFieldName(field_id);
9307     const char* type = dex_file.GetFieldTypeDescriptor(field_id);
9308     ThrowNoSuchFieldError("", klass, type, name);
9309   }
9310   return resolved;
9311 }
9312 
9313 ArtField* ClassLinker::FindResolvedField(ObjPtr<mirror::Class> klass,
9314                                          ObjPtr<mirror::DexCache> dex_cache,
9315                                          ObjPtr<mirror::ClassLoader> class_loader,
9316                                          uint32_t field_idx,
9317                                          bool is_static) {
9318   ArtField* resolved = nullptr;
9319   Thread* self = is_static ? Thread::Current() : nullptr;
9320   const DexFile& dex_file = *dex_cache->GetDexFile();
9321 
9322   resolved = is_static ? mirror::Class::FindStaticField(self, klass, dex_cache, field_idx)
9323                        : klass->FindInstanceField(dex_cache, field_idx);
9324 
9325   if (resolved == nullptr) {
9326     const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
9327     const char* name = dex_file.GetFieldName(field_id);
9328     const char* type = dex_file.GetFieldTypeDescriptor(field_id);
9329     resolved = is_static ? mirror::Class::FindStaticField(self, klass, name, type)
9330                          : klass->FindInstanceField(name, type);
9331   }
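  // As with FindResolvedMethod() above, the index-based lookup is the fast path when the
  // dex_cache matches the declaring class; the name-and-type search is the fallback, e.g. for
  // fields declared in a different dex file (illustrative summary, see Find*Field()).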
9332 
9333   if (resolved != nullptr &&
9334       hiddenapi::ShouldDenyAccessToMember(resolved,
9335                                           hiddenapi::AccessContext(class_loader, dex_cache),
9336                                           hiddenapi::AccessMethod::kLinking)) {
9337     resolved = nullptr;
9338   }
9339 
9340   if (resolved != nullptr) {
9341     dex_cache->SetResolvedField(field_idx, resolved);
9342   }
9343 
9344   return resolved;
9345 }
9346 
9347 ArtField* ClassLinker::FindResolvedFieldJLS(ObjPtr<mirror::Class> klass,
9348                                             ObjPtr<mirror::DexCache> dex_cache,
9349                                             ObjPtr<mirror::ClassLoader> class_loader,
9350                                             uint32_t field_idx) {
9351   ArtField* resolved = nullptr;
9352   Thread* self = Thread::Current();
9353   const DexFile& dex_file = *dex_cache->GetDexFile();
9354   const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
9355 
9356   const char* name = dex_file.GetFieldName(field_id);
9357   const char* type = dex_file.GetFieldTypeDescriptor(field_id);
9358   resolved = mirror::Class::FindField(self, klass, name, type);
9359 
9360   if (resolved != nullptr &&
9361       hiddenapi::ShouldDenyAccessToMember(resolved,
9362                                           hiddenapi::AccessContext(class_loader, dex_cache),
9363                                           hiddenapi::AccessMethod::kLinking)) {
9364     resolved = nullptr;
9365   }
9366 
9367   if (resolved != nullptr) {
9368     dex_cache->SetResolvedField(field_idx, resolved);
9369   }
9370 
9371   return resolved;
9372 }
9373 
9374 ObjPtr<mirror::MethodType> ClassLinker::ResolveMethodType(
9375     Thread* self,
9376     dex::ProtoIndex proto_idx,
9377     Handle<mirror::DexCache> dex_cache,
9378     Handle<mirror::ClassLoader> class_loader) {
9379   DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
9380   DCHECK(dex_cache != nullptr);
9381 
9382   ObjPtr<mirror::MethodType> resolved = dex_cache->GetResolvedMethodType(proto_idx);
9383   if (resolved != nullptr) {
9384     return resolved;
9385   }
9386 
9387   StackHandleScope<4> hs(self);
9388 
9389   // First resolve the return type.
9390   const DexFile& dex_file = *dex_cache->GetDexFile();
9391   const dex::ProtoId& proto_id = dex_file.GetProtoId(proto_idx);
9392   Handle<mirror::Class> return_type(hs.NewHandle(
9393       ResolveType(proto_id.return_type_idx_, dex_cache, class_loader)));
9394   if (return_type == nullptr) {
9395     DCHECK(self->IsExceptionPending());
9396     return nullptr;
9397   }
9398 
9399   // Then resolve the argument types.
9400   //
9401   // TODO: Is there a better way to figure out the number of method arguments
9402   // other than by looking at the shorty?
9403   const size_t num_method_args = strlen(dex_file.StringDataByIdx(proto_id.shorty_idx_)) - 1;
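  // Example (illustrative): for a proto (String, int) -> boolean the shorty is "ZLI"
  // (return type first), so num_method_args = strlen("ZLI") - 1 = 2.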
9404 
9405   ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
9406   Handle<mirror::ObjectArray<mirror::Class>> method_params(hs.NewHandle(
9407       mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, num_method_args)));
9408   if (method_params == nullptr) {
9409     DCHECK(self->IsExceptionPending());
9410     return nullptr;
9411   }
9412 
9413   DexFileParameterIterator it(dex_file, proto_id);
9414   int32_t i = 0;
9415   MutableHandle<mirror::Class> param_class = hs.NewHandle<mirror::Class>(nullptr);
9416   for (; it.HasNext(); it.Next()) {
9417     const dex::TypeIndex type_idx = it.GetTypeIdx();
9418     param_class.Assign(ResolveType(type_idx, dex_cache, class_loader));
9419     if (param_class == nullptr) {
9420       DCHECK(self->IsExceptionPending());
9421       return nullptr;
9422     }
9423 
9424     method_params->Set(i++, param_class.Get());
9425   }
9426 
9427   DCHECK(!it.HasNext());
9428 
9429   Handle<mirror::MethodType> type = hs.NewHandle(
9430       mirror::MethodType::Create(self, return_type, method_params));
9431   dex_cache->SetResolvedMethodType(proto_idx, type.Get());
9432 
9433   return type.Get();
9434 }
9435 
9436 ObjPtr<mirror::MethodType> ClassLinker::ResolveMethodType(Thread* self,
9437                                                           dex::ProtoIndex proto_idx,
9438                                                           ArtMethod* referrer) {
9439   StackHandleScope<2> hs(self);
9440   Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
9441   Handle<mirror::ClassLoader> class_loader(hs.NewHandle(referrer->GetClassLoader()));
9442   return ResolveMethodType(self, proto_idx, dex_cache, class_loader);
9443 }
9444 
9445 ObjPtr<mirror::MethodHandle> ClassLinker::ResolveMethodHandleForField(
9446     Thread* self,
9447     const dex::MethodHandleItem& method_handle,
9448     ArtMethod* referrer) {
9449   DexFile::MethodHandleType handle_type =
9450       static_cast<DexFile::MethodHandleType>(method_handle.method_handle_type_);
9451   mirror::MethodHandle::Kind kind;
9452   bool is_put;
9453   bool is_static;
9454   int32_t num_params;
9455   switch (handle_type) {
9456     case DexFile::MethodHandleType::kStaticPut: {
9457       kind = mirror::MethodHandle::Kind::kStaticPut;
9458       is_put = true;
9459       is_static = true;
9460       num_params = 1;
9461       break;
9462     }
9463     case DexFile::MethodHandleType::kStaticGet: {
9464       kind = mirror::MethodHandle::Kind::kStaticGet;
9465       is_put = false;
9466       is_static = true;
9467       num_params = 0;
9468       break;
9469     }
9470     case DexFile::MethodHandleType::kInstancePut: {
9471       kind = mirror::MethodHandle::Kind::kInstancePut;
9472       is_put = true;
9473       is_static = false;
9474       num_params = 2;
9475       break;
9476     }
9477     case DexFile::MethodHandleType::kInstanceGet: {
9478       kind = mirror::MethodHandle::Kind::kInstanceGet;
9479       is_put = false;
9480       is_static = false;
9481       num_params = 1;
9482       break;
9483     }
9484     case DexFile::MethodHandleType::kInvokeStatic:
9485     case DexFile::MethodHandleType::kInvokeInstance:
9486     case DexFile::MethodHandleType::kInvokeConstructor:
9487     case DexFile::MethodHandleType::kInvokeDirect:
9488     case DexFile::MethodHandleType::kInvokeInterface:
9489       UNREACHABLE();
9490   }
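  // Resulting MethodType shapes (illustrative, for a field of type T declared in class C):
  // static put (T)V, static get ()T, instance put (C, T)V, instance get (C)T; num_params
  // above counts only the parameter slots, not the return type.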
9491 
9492   ArtField* target_field =
9493       ResolveField(method_handle.field_or_method_idx_, referrer, is_static);
9494   if (LIKELY(target_field != nullptr)) {
9495     ObjPtr<mirror::Class> target_class = target_field->GetDeclaringClass();
9496     ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
9497     if (UNLIKELY(!referring_class->CanAccessMember(target_class, target_field->GetAccessFlags()))) {
9498       ThrowIllegalAccessErrorField(referring_class, target_field);
9499       return nullptr;
9500     }
9501     if (UNLIKELY(is_put && target_field->IsFinal())) {
9502       ThrowIllegalAccessErrorField(referring_class, target_field);
9503       return nullptr;
9504     }
9505   } else {
9506     DCHECK(Thread::Current()->IsExceptionPending());
9507     return nullptr;
9508   }
9509 
9510   StackHandleScope<4> hs(self);
9511   ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
9512   Handle<mirror::ObjectArray<mirror::Class>> method_params(hs.NewHandle(
9513       mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, num_params)));
9514   if (UNLIKELY(method_params == nullptr)) {
9515     DCHECK(self->IsExceptionPending());
9516     return nullptr;
9517   }
9518 
9519   Handle<mirror::Class> constructor_class;
9520   Handle<mirror::Class> return_type;
9521   switch (handle_type) {
9522     case DexFile::MethodHandleType::kStaticPut: {
9523       method_params->Set(0, target_field->ResolveType());
9524       return_type = hs.NewHandle(GetClassRoot(ClassRoot::kPrimitiveVoid, this));
9525       break;
9526     }
9527     case DexFile::MethodHandleType::kStaticGet: {
9528       return_type = hs.NewHandle(target_field->ResolveType());
9529       break;
9530     }
9531     case DexFile::MethodHandleType::kInstancePut: {
9532       method_params->Set(0, target_field->GetDeclaringClass());
9533       method_params->Set(1, target_field->ResolveType());
9534       return_type = hs.NewHandle(GetClassRoot(ClassRoot::kPrimitiveVoid, this));
9535       break;
9536     }
9537     case DexFile::MethodHandleType::kInstanceGet: {
9538       method_params->Set(0, target_field->GetDeclaringClass());
9539       return_type = hs.NewHandle(target_field->ResolveType());
9540       break;
9541     }
9542     case DexFile::MethodHandleType::kInvokeStatic:
9543     case DexFile::MethodHandleType::kInvokeInstance:
9544     case DexFile::MethodHandleType::kInvokeConstructor:
9545     case DexFile::MethodHandleType::kInvokeDirect:
9546     case DexFile::MethodHandleType::kInvokeInterface:
9547       UNREACHABLE();
9548   }
9549 
9550   for (int32_t i = 0; i < num_params; ++i) {
9551     if (UNLIKELY(method_params->Get(i) == nullptr)) {
9552       DCHECK(self->IsExceptionPending());
9553       return nullptr;
9554     }
9555   }
9556 
9557   if (UNLIKELY(return_type.IsNull())) {
9558     DCHECK(self->IsExceptionPending());
9559     return nullptr;
9560   }
9561 
9562   Handle<mirror::MethodType>
9563       method_type(hs.NewHandle(mirror::MethodType::Create(self, return_type, method_params)));
9564   if (UNLIKELY(method_type.IsNull())) {
9565     DCHECK(self->IsExceptionPending());
9566     return nullptr;
9567   }
9568 
9569   uintptr_t target = reinterpret_cast<uintptr_t>(target_field);
9570   return mirror::MethodHandleImpl::Create(self, target, kind, method_type);
9571 }
9572 
9573 ObjPtr<mirror::MethodHandle> ClassLinker::ResolveMethodHandleForMethod(
9574     Thread* self,
9575     const dex::MethodHandleItem& method_handle,
9576     ArtMethod* referrer) {
9577   DexFile::MethodHandleType handle_type =
9578       static_cast<DexFile::MethodHandleType>(method_handle.method_handle_type_);
9579   mirror::MethodHandle::Kind kind;
9580   uint32_t receiver_count = 0;
9581   ArtMethod* target_method = nullptr;
9582   switch (handle_type) {
9583     case DexFile::MethodHandleType::kStaticPut:
9584     case DexFile::MethodHandleType::kStaticGet:
9585     case DexFile::MethodHandleType::kInstancePut:
9586     case DexFile::MethodHandleType::kInstanceGet:
9587       UNREACHABLE();
9588     case DexFile::MethodHandleType::kInvokeStatic: {
9589       kind = mirror::MethodHandle::Kind::kInvokeStatic;
9590       receiver_count = 0;
9591       target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
9592                                                             method_handle.field_or_method_idx_,
9593                                                             referrer,
9594                                                             InvokeType::kStatic);
9595       break;
9596     }
9597     case DexFile::MethodHandleType::kInvokeInstance: {
9598       kind = mirror::MethodHandle::Kind::kInvokeVirtual;
9599       receiver_count = 1;
9600       target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
9601                                                             method_handle.field_or_method_idx_,
9602                                                             referrer,
9603                                                             InvokeType::kVirtual);
9604       break;
9605     }
9606     case DexFile::MethodHandleType::kInvokeConstructor: {
9607       // Constructors are currently implemented as a transform. They
9608       // are special cased later in this method.
9609       kind = mirror::MethodHandle::Kind::kInvokeTransform;
9610       receiver_count = 0;
9611       target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
9612                                                             method_handle.field_or_method_idx_,
9613                                                             referrer,
9614                                                             InvokeType::kDirect);
9615       break;
9616     }
9617     case DexFile::MethodHandleType::kInvokeDirect: {
9618       kind = mirror::MethodHandle::Kind::kInvokeDirect;
9619       receiver_count = 1;
9620       StackHandleScope<2> hs(self);
9621       // A constant method handle with type kInvokeDirect can refer to
9622       // a method that is private or to a method in a super class. To
9623       // disambiguate the two options, we resolve the method ignoring
9624       // the invocation type to determine if the method is private. We
9625       // then resolve again specifying the intended invocation type to
9626       // force the appropriate checks.
9627       target_method = ResolveMethodWithoutInvokeType(method_handle.field_or_method_idx_,
9628                                                      hs.NewHandle(referrer->GetDexCache()),
9629                                                      hs.NewHandle(referrer->GetClassLoader()));
9630       if (UNLIKELY(target_method == nullptr)) {
9631         break;
9632       }
9633 
9634       if (target_method->IsPrivate()) {
9635         kind = mirror::MethodHandle::Kind::kInvokeDirect;
9636         target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
9637                                                               method_handle.field_or_method_idx_,
9638                                                               referrer,
9639                                                               InvokeType::kDirect);
9640       } else {
9641         kind = mirror::MethodHandle::Kind::kInvokeSuper;
9642         target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
9643                                                               method_handle.field_or_method_idx_,
9644                                                               referrer,
9645                                                               InvokeType::kSuper);
9646         if (UNLIKELY(target_method == nullptr)) {
9647           break;
9648         }
9649         // Find the method specified in the parent in referring class
9650         // so invoke-super invokes the method in the parent of the
9651         // referrer.
9652         target_method =
9653             referrer->GetDeclaringClass()->FindVirtualMethodForVirtual(target_method,
9654                                                                        kRuntimePointerSize);
9655       }
9656       break;
9657     }
9658     case DexFile::MethodHandleType::kInvokeInterface: {
9659       kind = mirror::MethodHandle::Kind::kInvokeInterface;
9660       receiver_count = 1;
9661       target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
9662                                                             method_handle.field_or_method_idx_,
9663                                                             referrer,
9664                                                             InvokeType::kInterface);
9665       break;
9666     }
9667   }
9668 
9669   if (UNLIKELY(target_method == nullptr)) {
9670     DCHECK(Thread::Current()->IsExceptionPending());
9671     return nullptr;
9672   }
9673 
9674   ObjPtr<mirror::Class> target_class = target_method->GetDeclaringClass();
9675   ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
9676   uint32_t access_flags = target_method->GetAccessFlags();
9677   if (UNLIKELY(!referring_class->CanAccessMember(target_class, access_flags))) {
9678     ThrowIllegalAccessErrorMethod(referring_class, target_method);
9679     return nullptr;
9680   }
9681 
9682   // Calculate the number of parameters from the method shorty. We add the
9683   // receiver count (0 or 1) and deduct one for the return value.
9684   uint32_t shorty_length;
9685   target_method->GetShorty(&shorty_length);
9686   int32_t num_params = static_cast<int32_t>(shorty_length + receiver_count - 1);
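  // Example (illustrative): a virtual method with shorty "VIL" (void return, int and reference
  // parameters) and receiver_count == 1 gives num_params = 3 + 1 - 1 = 3: the receiver class
  // followed by the int and reference parameter types.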
9687 
9688   StackHandleScope<5> hs(self);
9689   ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
9690   Handle<mirror::ObjectArray<mirror::Class>> method_params(hs.NewHandle(
9691       mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, num_params)));
9692   if (method_params.Get() == nullptr) {
9693     DCHECK(self->IsExceptionPending());
9694     return nullptr;
9695   }
9696 
9697   const DexFile* dex_file = referrer->GetDexFile();
9698   const dex::MethodId& method_id = dex_file->GetMethodId(method_handle.field_or_method_idx_);
9699   int32_t index = 0;
9700   if (receiver_count != 0) {
9701     // Insert receiver. Use the class identified in the method handle rather than the declaring
9702     // class of the resolved method, which may be a super class or a default interface method
9703     // (b/115964401).
9704     ObjPtr<mirror::Class> receiver_class = LookupResolvedType(method_id.class_idx_, referrer);
9705     // receiver_class should have been resolved when resolving the target method.
9706     DCHECK(receiver_class != nullptr);
9707     method_params->Set(index++, receiver_class);
9708   }
9709 
9710   const dex::ProtoId& proto_id = dex_file->GetProtoId(method_id.proto_idx_);
9711   DexFileParameterIterator it(*dex_file, proto_id);
9712   while (it.HasNext()) {
9713     DCHECK_LT(index, num_params);
9714     const dex::TypeIndex type_idx = it.GetTypeIdx();
9715     ObjPtr<mirror::Class> klass = ResolveType(type_idx, referrer);
9716     if (nullptr == klass) {
9717       DCHECK(self->IsExceptionPending());
9718       return nullptr;
9719     }
9720     method_params->Set(index++, klass);
9721     it.Next();
9722   }
9723 
9724   Handle<mirror::Class> return_type =
9725       hs.NewHandle(ResolveType(proto_id.return_type_idx_, referrer));
9726   if (UNLIKELY(return_type.IsNull())) {
9727     DCHECK(self->IsExceptionPending());
9728     return nullptr;
9729   }
9730 
9731   Handle<mirror::MethodType>
9732       method_type(hs.NewHandle(mirror::MethodType::Create(self, return_type, method_params)));
9733   if (UNLIKELY(method_type.IsNull())) {
9734     DCHECK(self->IsExceptionPending());
9735     return nullptr;
9736   }
9737 
9738   if (UNLIKELY(handle_type == DexFile::MethodHandleType::kInvokeConstructor)) {
9739     Handle<mirror::Class> constructor_class = hs.NewHandle(target_method->GetDeclaringClass());
9740     Handle<mirror::MethodHandlesLookup> lookup =
9741         hs.NewHandle(mirror::MethodHandlesLookup::GetDefault(self));
9742     return lookup->FindConstructor(self, constructor_class, method_type);
9743   }
9744 
9745   uintptr_t target = reinterpret_cast<uintptr_t>(target_method);
9746   return mirror::MethodHandleImpl::Create(self, target, kind, method_type);
9747 }
9748 
9749 ObjPtr<mirror::MethodHandle> ClassLinker::ResolveMethodHandle(Thread* self,
9750                                                               uint32_t method_handle_idx,
9751                                                               ArtMethod* referrer)
9752     REQUIRES_SHARED(Locks::mutator_lock_) {
9753   const DexFile* const dex_file = referrer->GetDexFile();
9754   const dex::MethodHandleItem& method_handle = dex_file->GetMethodHandle(method_handle_idx);
9755   switch (static_cast<DexFile::MethodHandleType>(method_handle.method_handle_type_)) {
9756     case DexFile::MethodHandleType::kStaticPut:
9757     case DexFile::MethodHandleType::kStaticGet:
9758     case DexFile::MethodHandleType::kInstancePut:
9759     case DexFile::MethodHandleType::kInstanceGet:
9760       return ResolveMethodHandleForField(self, method_handle, referrer);
9761     case DexFile::MethodHandleType::kInvokeStatic:
9762     case DexFile::MethodHandleType::kInvokeInstance:
9763     case DexFile::MethodHandleType::kInvokeConstructor:
9764     case DexFile::MethodHandleType::kInvokeDirect:
9765     case DexFile::MethodHandleType::kInvokeInterface:
9766       return ResolveMethodHandleForMethod(self, method_handle, referrer);
9767   }
9768 }
9769 
9770 bool ClassLinker::IsQuickResolutionStub(const void* entry_point) const {
9771   return (entry_point == GetQuickResolutionStub()) ||
9772       (quick_resolution_trampoline_ == entry_point);
9773 }
9774 
9775 bool ClassLinker::IsQuickToInterpreterBridge(const void* entry_point) const {
9776   return (entry_point == GetQuickToInterpreterBridge()) ||
9777       (quick_to_interpreter_bridge_trampoline_ == entry_point);
9778 }
9779 
9780 bool ClassLinker::IsQuickGenericJniStub(const void* entry_point) const {
9781   return (entry_point == GetQuickGenericJniStub()) ||
9782       (quick_generic_jni_trampoline_ == entry_point);
9783 }
9784 
9785 bool ClassLinker::IsJniDlsymLookupStub(const void* entry_point) const {
9786   return entry_point == GetJniDlsymLookupStub() ||
9787       (jni_dlsym_lookup_trampoline_ == entry_point);
9788 }
9789 
9790 bool ClassLinker::IsJniDlsymLookupCriticalStub(const void* entry_point) const {
9791   return entry_point == GetJniDlsymLookupCriticalStub() ||
9792       (jni_dlsym_lookup_critical_trampoline_ == entry_point);
9793 }
9794 
9795 const void* ClassLinker::GetRuntimeQuickGenericJniStub() const {
9796   return GetQuickGenericJniStub();
9797 }
9798 
9799 void ClassLinker::SetEntryPointsToInterpreter(ArtMethod* method) const {
9800   if (!method->IsNative()) {
9801     method->SetEntryPointFromQuickCompiledCode(GetQuickToInterpreterBridge());
9802   } else {
9803     method->SetEntryPointFromQuickCompiledCode(GetQuickGenericJniStub());
9804   }
9805 }
9806 
9807 void ClassLinker::SetEntryPointsForObsoleteMethod(ArtMethod* method) const {
9808   DCHECK(method->IsObsolete());
9809   // We cannot mess with the entrypoints of native methods because they are used to determine how
9810   // large the method's quick stack frame is. Without this information we cannot walk the stacks.
9811   if (!method->IsNative()) {
9812     method->SetEntryPointFromQuickCompiledCode(GetInvokeObsoleteMethodStub());
9813   }
9814 }
9815 
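// Dumps class counts and the registered class loader hierarchy (used for SIGQUIT diagnostics).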
9816 void ClassLinker::DumpForSigQuit(std::ostream& os) {
9817   ScopedObjectAccess soa(Thread::Current());
9818   ReaderMutexLock mu(soa.Self(), *Locks::classlinker_classes_lock_);
9819   os << "Zygote loaded classes=" << NumZygoteClasses() << " post zygote classes="
9820      << NumNonZygoteClasses() << "\n";
9821   ReaderMutexLock mu2(soa.Self(), *Locks::dex_lock_);
9822   os << "Dumping registered class loaders\n";
9823   size_t class_loader_index = 0;
9824   for (const ClassLoaderData& class_loader : class_loaders_) {
9825     ObjPtr<mirror::ClassLoader> loader =
9826         ObjPtr<mirror::ClassLoader>::DownCast(soa.Self()->DecodeJObject(class_loader.weak_root));
9827     if (loader != nullptr) {
9828       os << "#" << class_loader_index++ << " " << loader->GetClass()->PrettyDescriptor() << ": [";
9829       bool saw_one_dex_file = false;
9830       for (const DexCacheData& dex_cache : dex_caches_) {
9831         if (dex_cache.IsValid() && dex_cache.class_table == class_loader.class_table) {
9832           if (saw_one_dex_file) {
9833             os << ":";
9834           }
9835           saw_one_dex_file = true;
9836           os << dex_cache.dex_file->GetLocation();
9837         }
9838       }
9839       os << "]";
9840       bool found_parent = false;
9841       if (loader->GetParent() != nullptr) {
9842         size_t parent_index = 0;
9843         for (const ClassLoaderData& class_loader2 : class_loaders_) {
9844           ObjPtr<mirror::ClassLoader> loader2 = ObjPtr<mirror::ClassLoader>::DownCast(
9845               soa.Self()->DecodeJObject(class_loader2.weak_root));
9846           if (loader2 == loader->GetParent()) {
9847             os << ", parent #" << parent_index;
9848             found_parent = true;
9849             break;
9850           }
9851           parent_index++;
9852         }
9853         if (!found_parent) {
9854           os << ", unregistered parent of type "
9855              << loader->GetParent()->GetClass()->PrettyDescriptor();
9856         }
9857       } else {
9858         os << ", no parent";
9859       }
9860       os << "\n";
9861     }
9862   }
9863   os << "Done dumping class loaders\n";
9864   Runtime* runtime = Runtime::Current();
9865   os << "Classes initialized: " << runtime->GetStat(KIND_GLOBAL_CLASS_INIT_COUNT) << " in "
9866      << PrettyDuration(runtime->GetStat(KIND_GLOBAL_CLASS_INIT_TIME)) << "\n";
9867 }
9868 
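// Accumulates zygote and post-zygote class counts over each visited class loader's class table.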
9869 class CountClassesVisitor : public ClassLoaderVisitor {
9870  public:
9871   CountClassesVisitor() : num_zygote_classes(0), num_non_zygote_classes(0) {}
9872 
9873   void Visit(ObjPtr<mirror::ClassLoader> class_loader)
9874       REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
9875     ClassTable* const class_table = class_loader->GetClassTable();
9876     if (class_table != nullptr) {
9877       num_zygote_classes += class_table->NumZygoteClasses(class_loader);
9878       num_non_zygote_classes += class_table->NumNonZygoteClasses(class_loader);
9879     }
9880   }
9881 
9882   size_t num_zygote_classes;
9883   size_t num_non_zygote_classes;
9884 };
9885 
9886 size_t ClassLinker::NumZygoteClasses() const {
9887   CountClassesVisitor visitor;
9888   VisitClassLoaders(&visitor);
9889   return visitor.num_zygote_classes + boot_class_table_->NumZygoteClasses(nullptr);
9890 }
9891 
9892 size_t ClassLinker::NumNonZygoteClasses() const {
9893   CountClassesVisitor visitor;
9894   VisitClassLoaders(&visitor);
9895   return visitor.num_non_zygote_classes + boot_class_table_->NumNonZygoteClasses(nullptr);
9896 }
9897 
9898 size_t ClassLinker::NumLoadedClasses() {
9899   ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
9900   // Only return non-zygote classes since these are the ones that apps care about.
9901   return NumNonZygoteClasses();
9902 }
9903 
9904 pid_t ClassLinker::GetClassesLockOwner() {
9905   return Locks::classlinker_classes_lock_->GetExclusiveOwnerTid();
9906 }
9907 
9908 pid_t ClassLinker::GetDexLockOwner() {
9909   return Locks::dex_lock_->GetExclusiveOwnerTid();
9910 }
9911 
9912 void ClassLinker::SetClassRoot(ClassRoot class_root, ObjPtr<mirror::Class> klass) {
9913   DCHECK(!init_done_);
9914 
9915   DCHECK(klass != nullptr);
9916   DCHECK(klass->GetClassLoader() == nullptr);
9917 
9918   mirror::ObjectArray<mirror::Class>* class_roots = class_roots_.Read();
9919   DCHECK(class_roots != nullptr);
9920   DCHECK_LT(static_cast<uint32_t>(class_root), static_cast<uint32_t>(ClassRoot::kMax));
9921   int32_t index = static_cast<int32_t>(class_root);
9922   DCHECK(class_roots->Get(index) == nullptr);
9923   class_roots->Set<false>(index, klass);
9924 }
9925 
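// Builds a class loader of |loader_class| directly in managed memory: the DexPathList and its
// Element objects are constructed by hand rather than by running the Java constructors. Used
// only by gtests and the compiler (see the per-dex-file loop below).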
9926 ObjPtr<mirror::ClassLoader> ClassLinker::CreateWellKnownClassLoader(
9927     Thread* self,
9928     const std::vector<const DexFile*>& dex_files,
9929     Handle<mirror::Class> loader_class,
9930     Handle<mirror::ClassLoader> parent_loader,
9931     Handle<mirror::ObjectArray<mirror::ClassLoader>> shared_libraries) {
9932 
9933   StackHandleScope<5> hs(self);
9934 
9935   ArtField* dex_elements_field =
9936       jni::DecodeArtField(WellKnownClasses::dalvik_system_DexPathList_dexElements);
9937 
9938   Handle<mirror::Class> dex_elements_class(hs.NewHandle(dex_elements_field->ResolveType()));
9939   DCHECK(dex_elements_class != nullptr);
9940   DCHECK(dex_elements_class->IsArrayClass());
9941   Handle<mirror::ObjectArray<mirror::Object>> h_dex_elements(hs.NewHandle(
9942       mirror::ObjectArray<mirror::Object>::Alloc(self,
9943                                                  dex_elements_class.Get(),
9944                                                  dex_files.size())));
9945   Handle<mirror::Class> h_dex_element_class =
9946       hs.NewHandle(dex_elements_class->GetComponentType());
9947 
9948   ArtField* element_file_field =
9949       jni::DecodeArtField(WellKnownClasses::dalvik_system_DexPathList__Element_dexFile);
9950   DCHECK_EQ(h_dex_element_class.Get(), element_file_field->GetDeclaringClass());
9951 
9952   ArtField* cookie_field = jni::DecodeArtField(WellKnownClasses::dalvik_system_DexFile_cookie);
9953   DCHECK_EQ(cookie_field->GetDeclaringClass(), element_file_field->LookupResolvedType());
9954 
9955   ArtField* file_name_field = jni::DecodeArtField(WellKnownClasses::dalvik_system_DexFile_fileName);
9956   DCHECK_EQ(file_name_field->GetDeclaringClass(), element_file_field->LookupResolvedType());
9957 
9958   // Fill the elements array.
9959   int32_t index = 0;
9960   for (const DexFile* dex_file : dex_files) {
9961     StackHandleScope<4> hs2(self);
9962 
9963     // CreateWellKnownClassLoader is only used by gtests and compiler.
9964     // Index 0 of h_long_array is supposed to be the oat file but we can leave it null.
9965     Handle<mirror::LongArray> h_long_array = hs2.NewHandle(mirror::LongArray::Alloc(
9966         self,
9967         kDexFileIndexStart + 1));
9968     DCHECK(h_long_array != nullptr);
9969     h_long_array->Set(kDexFileIndexStart, reinterpret_cast64<int64_t>(dex_file));
9970 
9971     // Note that this creates a finalizable dalvik.system.DexFile object and a corresponding
9972     // FinalizerReference which will never get cleaned up without a started runtime.
9973     Handle<mirror::Object> h_dex_file = hs2.NewHandle(
9974         cookie_field->GetDeclaringClass()->AllocObject(self));
9975     DCHECK(h_dex_file != nullptr);
9976     cookie_field->SetObject<false>(h_dex_file.Get(), h_long_array.Get());
9977 
9978     Handle<mirror::String> h_file_name = hs2.NewHandle(
9979         mirror::String::AllocFromModifiedUtf8(self, dex_file->GetLocation().c_str()));
9980     DCHECK(h_file_name != nullptr);
9981     file_name_field->SetObject<false>(h_dex_file.Get(), h_file_name.Get());
9982 
9983     Handle<mirror::Object> h_element = hs2.NewHandle(h_dex_element_class->AllocObject(self));
9984     DCHECK(h_element != nullptr);
9985     element_file_field->SetObject<false>(h_element.Get(), h_dex_file.Get());
9986 
9987     h_dex_elements->Set(index, h_element.Get());
9988     index++;
9989   }
9990   DCHECK_EQ(index, h_dex_elements->GetLength());
9991 
9992   // Create DexPathList.
9993   Handle<mirror::Object> h_dex_path_list = hs.NewHandle(
9994       dex_elements_field->GetDeclaringClass()->AllocObject(self));
9995   DCHECK(h_dex_path_list != nullptr);
9996   // Set elements.
9997   dex_elements_field->SetObject<false>(h_dex_path_list.Get(), h_dex_elements.Get());
9998   // Create an empty List for the "nativeLibraryDirectories" field, required for native tests.
9999   // Note: this code is uncommon(oatdump)/testing-only, so don't add further WellKnownClasses
10000   //       elements.
10001   {
10002     ArtField* native_lib_dirs = dex_elements_field->GetDeclaringClass()->
10003         FindDeclaredInstanceField("nativeLibraryDirectories", "Ljava/util/List;");
10004     DCHECK(native_lib_dirs != nullptr);
10005     ObjPtr<mirror::Class> list_class = FindSystemClass(self, "Ljava/util/ArrayList;");
10006     DCHECK(list_class != nullptr);
10007     {
10008       StackHandleScope<1> h_list_scope(self);
10009       Handle<mirror::Class> h_list_class(h_list_scope.NewHandle<mirror::Class>(list_class));
10010       bool list_init = EnsureInitialized(self, h_list_class, true, true);
10011       DCHECK(list_init);
10012       list_class = h_list_class.Get();
10013     }
10014     ObjPtr<mirror::Object> list_object = list_class->AllocObject(self);
10015     // Note: we leave the object uninitialized. This must never leak into any non-testing code, but
10016     //       is fine for testing. While it violates a Java-code invariant (the elementData field is
10017     //       normally never null), as long as one does not try to add elements, this will still
10018     //       work.
10019     native_lib_dirs->SetObject<false>(h_dex_path_list.Get(), list_object);
10020   }
10021 
10022   // Create the class loader.
10023   Handle<mirror::ClassLoader> h_class_loader = hs.NewHandle<mirror::ClassLoader>(
10024       ObjPtr<mirror::ClassLoader>::DownCast(loader_class->AllocObject(self)));
10025   DCHECK(h_class_loader != nullptr);
10026   // Set DexPathList.
10027   ArtField* path_list_field =
10028       jni::DecodeArtField(WellKnownClasses::dalvik_system_BaseDexClassLoader_pathList);
10029   DCHECK(path_list_field != nullptr);
10030   path_list_field->SetObject<false>(h_class_loader.Get(), h_dex_path_list.Get());
10031 
10032   // Make a pretend boot-classpath.
10033   // TODO: Should we scan the image?
10034   ArtField* const parent_field =
10035       mirror::Class::FindField(self,
10036                                h_class_loader->GetClass(),
10037                                "parent",
10038                                "Ljava/lang/ClassLoader;");
10039   DCHECK(parent_field != nullptr);
10040   if (parent_loader.Get() == nullptr) {
10041     ScopedObjectAccessUnchecked soa(self);
10042     ObjPtr<mirror::Object> boot_loader(soa.Decode<mirror::Class>(
10043         WellKnownClasses::java_lang_BootClassLoader)->AllocObject(self));
10044     parent_field->SetObject<false>(h_class_loader.Get(), boot_loader);
10045   } else {
10046     parent_field->SetObject<false>(h_class_loader.Get(), parent_loader.Get());
10047   }
10048 
10049   ArtField* shared_libraries_field =
10050       jni::DecodeArtField(WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoaders);
10051   DCHECK(shared_libraries_field != nullptr);
10052   shared_libraries_field->SetObject<false>(h_class_loader.Get(), shared_libraries.Get());
10053 
10054   return h_class_loader.Get();
10055 }
10056 
10057 jobject ClassLinker::CreateWellKnownClassLoader(Thread* self,
10058                                                 const std::vector<const DexFile*>& dex_files,
10059                                                 jclass loader_class,
10060                                                 jobject parent_loader,
10061                                                 jobject shared_libraries) {
10062   CHECK(self->GetJniEnv()->IsSameObject(loader_class,
10063                                         WellKnownClasses::dalvik_system_PathClassLoader) ||
10064         self->GetJniEnv()->IsSameObject(loader_class,
10065                                         WellKnownClasses::dalvik_system_DelegateLastClassLoader) ||
10066         self->GetJniEnv()->IsSameObject(loader_class,
10067                                         WellKnownClasses::dalvik_system_InMemoryDexClassLoader));
10068 
10069   // SOAAlreadyRunnable is protected, and we need something to add a global reference.
10070   // We could move the jobject to the callers, but all call-sites do this...
10071   ScopedObjectAccessUnchecked soa(self);
10072 
10073   // For now, create a libcore-level DexFile for each ART DexFile. This "explodes" multidex.
10074   StackHandleScope<4> hs(self);
10075 
10076   Handle<mirror::Class> h_loader_class =
10077       hs.NewHandle<mirror::Class>(soa.Decode<mirror::Class>(loader_class));
10078   Handle<mirror::ClassLoader> h_parent =
10079       hs.NewHandle<mirror::ClassLoader>(soa.Decode<mirror::ClassLoader>(parent_loader));
10080   Handle<mirror::ObjectArray<mirror::ClassLoader>> h_shared_libraries =
10081       hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::ClassLoader>>(shared_libraries));
10082 
10083   ObjPtr<mirror::ClassLoader> loader = CreateWellKnownClassLoader(
10084       self,
10085       dex_files,
10086       h_loader_class,
10087       h_parent,
10088       h_shared_libraries);
10089 
10090   // Make it a global ref and return.
10091   ScopedLocalRef<jobject> local_ref(
10092       soa.Env(), soa.Env()->AddLocalReference<jobject>(loader));
10093   return soa.Env()->NewGlobalRef(local_ref.get());
10094 }
10095 
10096 jobject ClassLinker::CreatePathClassLoader(Thread* self,
10097                                            const std::vector<const DexFile*>& dex_files) {
10098   return CreateWellKnownClassLoader(self,
10099                                     dex_files,
10100                                     WellKnownClasses::dalvik_system_PathClassLoader,
10101                                     nullptr);
10102 }
10103 
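// Clears the array-class lookup cache and resets its round-robin victim index.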
10104 void ClassLinker::DropFindArrayClassCache() {
10105   std::fill_n(find_array_class_cache_, kFindArrayCacheSize, GcRoot<mirror::Class>(nullptr));
10106   find_array_class_cache_next_victim_ = 0;
10107 }
10108 
10109 void ClassLinker::VisitClassLoaders(ClassLoaderVisitor* visitor) const {
10110   Thread* const self = Thread::Current();
10111   for (const ClassLoaderData& data : class_loaders_) {
10112     // Need to use DecodeJObject so that we get null for cleared JNI weak globals.
10113     ObjPtr<mirror::ClassLoader> class_loader = ObjPtr<mirror::ClassLoader>::DownCast(
10114         self->DecodeJObject(data.weak_root));
10115     if (class_loader != nullptr) {
10116       visitor->Visit(class_loader);
10117     }
10118   }
10119 }
10120 
10121 void ClassLinker::VisitAllocators(AllocatorVisitor* visitor) const {
10122   for (const ClassLoaderData& data : class_loaders_) {
10123     LinearAlloc* alloc = data.allocator;
10124     if (alloc != nullptr && !visitor->Visit(alloc)) {
10125         break;
10126     }
10127   }
10128 }
10129 
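// Records |dex_file| as a strong root in the class loader's class table so it is kept alive
// for as long as the class loader is.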
10130 void ClassLinker::InsertDexFileInToClassLoader(ObjPtr<mirror::Object> dex_file,
10131                                                ObjPtr<mirror::ClassLoader> class_loader) {
10132   DCHECK(dex_file != nullptr);
10133   Thread* const self = Thread::Current();
10134   WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
10135   ClassTable* const table = ClassTableForClassLoader(class_loader);
10136   DCHECK(table != nullptr);
10137   if (table->InsertStrongRoot(dex_file) && class_loader != nullptr) {
10138     // It was not already inserted, perform the write barrier to let the GC know the class loader's
10139     // class table was modified.
10140     WriteBarrier::ForEveryFieldWrite(class_loader);
10141   }
10142 }
10143 
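// Unregisters class loaders whose JNI weak roots have been cleared by the GC: their
// ClassLoaderData entries are removed under the lock and deleted afterwards.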
10144 void ClassLinker::CleanupClassLoaders() {
10145   Thread* const self = Thread::Current();
10146   std::vector<ClassLoaderData> to_delete;
10147   // Do the delete outside the lock to avoid a lock order violation with the JIT code cache.
10148   {
10149     WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
10150     for (auto it = class_loaders_.begin(); it != class_loaders_.end(); ) {
10151       const ClassLoaderData& data = *it;
10152       // Need to use DecodeJObject so that we get null for cleared JNI weak globals.
10153       ObjPtr<mirror::ClassLoader> class_loader =
10154           ObjPtr<mirror::ClassLoader>::DownCast(self->DecodeJObject(data.weak_root));
10155       if (class_loader != nullptr) {
10156         ++it;
10157       } else {
10158         VLOG(class_linker) << "Freeing class loader";
10159         to_delete.push_back(data);
10160         it = class_loaders_.erase(it);
10161       }
10162     }
10163   }
10164   for (ClassLoaderData& data : to_delete) {
10165     // CHA unloading analysis and SingleImplementation cleanups are required.
10166     DeleteClassLoader(self, data, /*cleanup_cha=*/ true);
10167   }
10168 }
10169 
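// Visits loaded classes until it finds the one whose virtual-methods slice contains |method_|.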
10170 class ClassLinker::FindVirtualMethodHolderVisitor : public ClassVisitor {
10171  public:
10172   FindVirtualMethodHolderVisitor(const ArtMethod* method, PointerSize pointer_size)
10173       : method_(method),
10174         pointer_size_(pointer_size) {}
10175 
10176   bool operator()(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) override {
10177     if (klass->GetVirtualMethodsSliceUnchecked(pointer_size_).Contains(method_)) {
10178       holder_ = klass;
10179     }
10180     // Return false to stop searching if holder_ is not null.
10181     return holder_ == nullptr;
10182   }
10183 
10184   ObjPtr<mirror::Class> holder_ = nullptr;
10185   const ArtMethod* const method_;
10186   const PointerSize pointer_size_;
10187 };
10188 
10189 ObjPtr<mirror::Class> ClassLinker::GetHoldingClassOfCopiedMethod(ArtMethod* method) {
10190   ScopedTrace trace(__FUNCTION__);  // Since this function is slow, have a trace to notify people.
10191   CHECK(method->IsCopied());
10192   FindVirtualMethodHolderVisitor visitor(method, image_pointer_size_);
10193   VisitClasses(&visitor);
10194   return visitor.holder_;
10195 }
10196 
10197 ObjPtr<mirror::IfTable> ClassLinker::AllocIfTable(Thread* self, size_t ifcount) {
10198   return ObjPtr<mirror::IfTable>::DownCast(ObjPtr<mirror::ObjectArray<mirror::Object>>(
10199       mirror::IfTable::Alloc(self,
10200                              GetClassRoot<mirror::ObjectArray<mirror::Object>>(this),
10201                              ifcount * mirror::IfTable::kMax)));
10202 }
10203 
10204 bool ClassLinker::IsUpdatableBootClassPathDescriptor(const char* descriptor ATTRIBUTE_UNUSED) {
10205   // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
10206   LOG(FATAL) << "UNREACHABLE";
10207   UNREACHABLE();
10208 }
10209 
10210 bool ClassLinker::DenyAccessBasedOnPublicSdk(ArtMethod* art_method ATTRIBUTE_UNUSED) const
10211     REQUIRES_SHARED(Locks::mutator_lock_) {
10212   // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
10213   LOG(FATAL) << "UNREACHABLE";
10214   UNREACHABLE();
10215 }
10216 
10217 bool ClassLinker::DenyAccessBasedOnPublicSdk(ArtField* art_field ATTRIBUTE_UNUSED) const
10218     REQUIRES_SHARED(Locks::mutator_lock_) {
10219   // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
10220   LOG(FATAL) << "UNREACHABLE";
10221   UNREACHABLE();
10222 }
10223 
10224 bool ClassLinker::DenyAccessBasedOnPublicSdk(const char* type_descriptor ATTRIBUTE_UNUSED) const {
10225   // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
10226   LOG(FATAL) << "UNREACHABLE";
10227   UNREACHABLE();
10228 }
10229 
10230 void ClassLinker::SetEnablePublicSdkChecks(bool enabled ATTRIBUTE_UNUSED) {
10231   // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
10232   LOG(FATAL) << "UNREACHABLE";
10233   UNREACHABLE();
10234 }
10235 
10236 // Instantiate ClassLinker::ResolveMethod.
10237 template ArtMethod* ClassLinker::ResolveMethod<ClassLinker::ResolveMode::kCheckICCEAndIAE>(
10238     uint32_t method_idx,
10239     Handle<mirror::DexCache> dex_cache,
10240     Handle<mirror::ClassLoader> class_loader,
10241     ArtMethod* referrer,
10242     InvokeType type);
10243 template ArtMethod* ClassLinker::ResolveMethod<ClassLinker::ResolveMode::kNoChecks>(
10244     uint32_t method_idx,
10245     Handle<mirror::DexCache> dex_cache,
10246     Handle<mirror::ClassLoader> class_loader,
10247     ArtMethod* referrer,
10248     InvokeType type);
10249 
10250 // Instantiate ClassLinker::AllocClass.
10251 template ObjPtr<mirror::Class> ClassLinker::AllocClass</* kMovable= */ true>(
10252     Thread* self,
10253     ObjPtr<mirror::Class> java_lang_Class,
10254     uint32_t class_size);
10255 template ObjPtr<mirror::Class> ClassLinker::AllocClass</* kMovable= */ false>(
10256     Thread* self,
10257     ObjPtr<mirror::Class> java_lang_Class,
10258     uint32_t class_size);
10259 
10260 }  // namespace art
10261