/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "class_linker.h"

#include <unistd.h>

#include <algorithm>
#include <deque>
#include <forward_list>
#include <iostream>
#include <iterator>
#include <map>
#include <memory>
#include <queue>
#include <string>
#include <string_view>
#include <tuple>
#include <utility>
#include <vector>

#include "android-base/macros.h"
#include "android-base/stringprintf.h"
#include "android-base/strings.h"
#include "art_field-inl.h"
#include "art_method-inl.h"
#include "barrier.h"
#include "base/arena_allocator.h"
#include "base/arena_bit_vector.h"
#include "base/casts.h"
#include "base/file_utils.h"
#include "base/hash_map.h"
#include "base/hash_set.h"
#include "base/leb128.h"
#include "base/logging.h"
#include "base/mem_map_arena_pool.h"
#include "base/membarrier.h"
#include "base/metrics/metrics.h"
#include "base/mutex-inl.h"
#include "base/os.h"
#include "base/pointer_size.h"
#include "base/quasi_atomic.h"
#include "base/scoped_arena_containers.h"
#include "base/scoped_flock.h"
#include "base/stl_util.h"
#include "base/systrace.h"
#include "base/time_utils.h"
#include "base/unix_file/fd_file.h"
#include "base/utils.h"
#include "base/value_object.h"
#include "cha.h"
#include "class_linker-inl.h"
#include "class_loader_utils.h"
#include "class_root-inl.h"
#include "class_table-inl.h"
#include "compiler_callbacks.h"
#include "debug_print.h"
#include "debugger.h"
#include "dex/class_accessor-inl.h"
#include "dex/descriptors_names.h"
#include "dex/dex_file-inl.h"
#include "dex/dex_file_annotations.h"
#include "dex/dex_file_exception_helpers.h"
#include "dex/dex_file_loader.h"
#include "dex/signature-inl.h"
#include "dex/utf.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "experimental_flags.h"
#include "gc/accounting/card_table-inl.h"
#include "gc/accounting/heap_bitmap-inl.h"
#include "gc/accounting/space_bitmap-inl.h"
#include "gc/heap-visit-objects-inl.h"
#include "gc/heap.h"
#include "gc/scoped_gc_critical_section.h"
#include "gc/space/image_space.h"
#include "gc/space/space-inl.h"
#include "gc_root-inl.h"
#include "handle_scope-inl.h"
#include "hidden_api.h"
#include "imt_conflict_table.h"
#include "imtable-inl.h"
#include "intern_table-inl.h"
#include "interpreter/interpreter.h"
#include "interpreter/mterp/nterp.h"
#include "jit/debugger_interface.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "jni/java_vm_ext.h"
#include "jni/jni_internal.h"
#include "linear_alloc-inl.h"
#include "mirror/array-alloc-inl.h"
#include "mirror/array-inl.h"
#include "mirror/call_site.h"
#include "mirror/class-alloc-inl.h"
#include "mirror/class-inl.h"
#include "mirror/class.h"
#include "mirror/class_ext.h"
#include "mirror/class_loader.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/dex_cache.h"
#include "mirror/emulated_stack_frame.h"
#include "mirror/field.h"
#include "mirror/iftable-inl.h"
#include "mirror/method.h"
#include "mirror/method_handle_impl.h"
#include "mirror/method_handles_lookup.h"
#include "mirror/method_type-inl.h"
#include "mirror/object-inl.h"
#include "mirror/object-refvisitor-inl.h"
#include "mirror/object.h"
#include "mirror/object_array-alloc-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/object_array.h"
#include "mirror/object_reference-inl.h"
#include "mirror/object_reference.h"
#include "mirror/proxy.h"
#include "mirror/reference-inl.h"
#include "mirror/stack_trace_element.h"
#include "mirror/string-inl.h"
#include "mirror/throwable.h"
#include "mirror/var_handle.h"
#include "native/dalvik_system_DexFile.h"
#include "nativehelper/scoped_local_ref.h"
#include "nterp_helpers-inl.h"
#include "nterp_helpers.h"
#include "oat/image-inl.h"
#include "oat/oat.h"
#include "oat/oat_file-inl.h"
#include "oat/oat_file.h"
#include "oat/oat_file_assistant.h"
#include "oat/oat_file_manager.h"
#include "object_lock.h"
#include "profile/profile_compilation_info.h"
#include "runtime.h"
#include "runtime_callbacks.h"
#include "scoped_thread_state_change-inl.h"
#include "startup_completed_task.h"
#include "thread-inl.h"
#include "thread.h"
#include "thread_list.h"
#include "trace.h"
#include "transaction.h"
#include "vdex_file.h"
#include "verifier/class_verifier.h"
#include "verifier/verifier_deps.h"
#include "well_known_classes.h"

namespace art HIDDEN {

using android::base::StringPrintf;

static constexpr bool kCheckImageObjects = kIsDebugBuild;
static constexpr bool kVerifyArtMethodDeclaringClasses = kIsDebugBuild;

static void ThrowNoClassDefFoundError(const char* fmt, ...)
    __attribute__((__format__(__printf__, 1, 2)))
    REQUIRES_SHARED(Locks::mutator_lock_);
static void ThrowNoClassDefFoundError(const char* fmt, ...) {
  va_list args;
  va_start(args, fmt);
  Thread* self = Thread::Current();
  self->ThrowNewExceptionV("Ljava/lang/NoClassDefFoundError;", fmt, args);
  va_end(args);
}

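// Returns the error object recorded on the class's `ClassExt`, or null if no
// extension data was allocated (for example, if allocating it failed with OOME).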
static ObjPtr<mirror::Object> GetErroneousStateError(ObjPtr<mirror::Class> c)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ObjPtr<mirror::ClassExt> ext(c->GetExtData());
  if (ext == nullptr) {
    return nullptr;
  } else {
    return ext->GetErroneousStateError();
  }
}

static bool IsVerifyError(ObjPtr<mirror::Object> obj)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // This is slow, but we only use it for rethrowing an error, and for DCHECK.
  return obj->GetClass()->DescriptorEquals("Ljava/lang/VerifyError;");
}

// Helper for ThrowEarlierClassFailure. Throws the stored error.
static void HandleEarlierErroneousStateError(Thread* self,
                                             ClassLinker* class_linker,
                                             ObjPtr<mirror::Class> c)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ObjPtr<mirror::Object> obj = GetErroneousStateError(c);
  DCHECK(obj != nullptr);
  self->AssertNoPendingException();
  DCHECK(!obj->IsClass());
  ObjPtr<mirror::Class> throwable_class = GetClassRoot<mirror::Throwable>(class_linker);
  ObjPtr<mirror::Class> error_class = obj->GetClass();
  CHECK(throwable_class->IsAssignableFrom(error_class));
  self->SetException(obj->AsThrowable());
  self->AssertPendingException();
}

static void UpdateClassAfterVerification(Handle<mirror::Class> klass,
                                         PointerSize pointer_size,
                                         verifier::FailureKind failure_kind)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
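  // Only classes that verified cleanly (no soft failures) may have their
  // methods skip access checks at runtime.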
  if (klass->IsVerified() && (failure_kind == verifier::FailureKind::kNoFailure)) {
    klass->SetSkipAccessChecksFlagOnAllMethods(pointer_size);
  }

  // Now that the class has passed verification, try to set nterp entrypoints
  // to methods that currently use the switch interpreter.
  if (interpreter::CanRuntimeUseNterp()) {
    for (ArtMethod& m : klass->GetMethods(pointer_size)) {
      if (class_linker->IsQuickToInterpreterBridge(m.GetEntryPointFromQuickCompiledCode())) {
        runtime->GetInstrumentation()->InitializeMethodsCode(&m, /*aot_code=*/nullptr);
      }
    }
  }
}

// Callback responsible for making a batch of classes visibly initialized after ensuring
// visibility for all threads, either by using `membarrier()` or by running a checkpoint.
class ClassLinker::VisiblyInitializedCallback final
    : public Closure, public IntrusiveForwardListNode<VisiblyInitializedCallback> {
 public:
  explicit VisiblyInitializedCallback(ClassLinker* class_linker)
      : class_linker_(class_linker),
        num_classes_(0u),
        thread_visibility_counter_(0),
        barriers_() {
    std::fill_n(classes_, kMaxClasses, nullptr);
  }

  bool IsEmpty() const {
    DCHECK_LE(num_classes_, kMaxClasses);
    return num_classes_ == 0u;
  }

  bool IsFull() const {
    DCHECK_LE(num_classes_, kMaxClasses);
    return num_classes_ == kMaxClasses;
  }

  void AddClass(Thread* self, ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_EQ(klass->GetStatus(), ClassStatus::kInitialized);
    DCHECK(!IsFull());
    classes_[num_classes_] = self->GetJniEnv()->GetVm()->AddWeakGlobalRef(self, klass);
    ++num_classes_;
  }

  void AddBarrier(Barrier* barrier) {
    barriers_.push_front(barrier);
  }

  std::forward_list<Barrier*> GetAndClearBarriers() {
    std::forward_list<Barrier*> result;
    result.swap(barriers_);
    result.reverse();  // Return barriers in insertion order.
    return result;
  }

  void MakeVisible(Thread* self) {
    if (class_linker_->visibly_initialize_classes_with_membarier_) {
      // If the associated register command succeeded, this command should never fail.
      int membarrier_result = art::membarrier(MembarrierCommand::kPrivateExpedited);
      CHECK_EQ(membarrier_result, 0) << strerror(errno);
      MarkVisiblyInitialized(self);
    } else {
      DCHECK_EQ(thread_visibility_counter_.load(std::memory_order_relaxed), 0);
      size_t count = Runtime::Current()->GetThreadList()->RunCheckpoint(this);
      AdjustThreadVisibilityCounter(self, count);
    }
  }

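  // Checkpoint body, executed once by each thread; the adjustment that brings
  // the counter back to zero triggers `MarkVisiblyInitialized()`.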
  void Run(Thread* self) override {
    AdjustThreadVisibilityCounter(self, -1);
  }

 private:
  void AdjustThreadVisibilityCounter(Thread* self, ssize_t adjustment) {
    ssize_t old = thread_visibility_counter_.fetch_add(adjustment, std::memory_order_relaxed);
    if (old + adjustment == 0) {
      // All threads passed the checkpoint. Mark classes as visibly initialized.
      MarkVisiblyInitialized(self);
    }
  }

  void MarkVisiblyInitialized(Thread* self) {
    {
      ScopedObjectAccess soa(self);
      StackHandleScope<1u> hs(self);
      MutableHandle<mirror::Class> klass = hs.NewHandle<mirror::Class>(nullptr);
      JavaVMExt* vm = self->GetJniEnv()->GetVm();
      for (size_t i = 0, num = num_classes_; i != num; ++i) {
        klass.Assign(ObjPtr<mirror::Class>::DownCast(self->DecodeJObject(classes_[i])));
        vm->DeleteWeakGlobalRef(self, classes_[i]);
        if (klass != nullptr) {
          mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
          class_linker_->FixupStaticTrampolines(self, klass.Get());
        }
      }
      num_classes_ = 0u;
    }
    class_linker_->VisiblyInitializedCallbackDone(self, this);
  }

  // Making classes initialized in bigger batches helps with app startup for apps
  // that initialize a lot of classes, by running fewer synchronization functions.
  // (On the other hand, bigger batches make class initialization checks more
  // likely to take a slow path, but that is mitigated by making partially
  // filled buffers visibly initialized if we take the slow path many times.
  // See `Thread::kMakeVisiblyInitializedCounterTriggerCount`.)
  static constexpr size_t kMaxClasses = 48;

  ClassLinker* const class_linker_;
  size_t num_classes_;
  jweak classes_[kMaxClasses];

  // The thread visibility counter starts at 0 and it is incremented by the number of
  // threads that need to run this callback (by the thread that requests the callback
  // to be run) and decremented once for each `Run()` execution. When it reaches 0,
  // whether after the increment or after a decrement, we know that `Run()` was executed
  // for all threads and therefore we can mark the classes as visibly initialized.
  // Used only if the preferred `membarrier()` command is unsupported.
  std::atomic<ssize_t> thread_visibility_counter_;

  // List of barriers to `Pass()` for threads that wait for the callback to complete.
  std::forward_list<Barrier*> barriers_;
};

void ClassLinker::MakeInitializedClassesVisiblyInitialized(Thread* self, bool wait) {
  if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
    return;  // Nothing to do. Thanks to the x86 memory model, classes skip the initialized status.
  }
  std::optional<Barrier> maybe_barrier;  // Avoid constructing the Barrier for `wait == false`.
  if (wait) {
    Locks::mutator_lock_->AssertNotHeld(self);
    maybe_barrier.emplace(0);
  }
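  // When waiting, attach the barrier to every currently running callback; each
  // callback passes the barrier when it completes, and the `Increment()` call
  // below blocks until all `wait_count` of them have done so.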
  int wait_count = 0;
  VisiblyInitializedCallback* callback = nullptr;
  {
    MutexLock lock(self, visibly_initialized_callback_lock_);
    if (visibly_initialized_callback_ != nullptr && !visibly_initialized_callback_->IsEmpty()) {
      callback = visibly_initialized_callback_.release();
      running_visibly_initialized_callbacks_.push_front(*callback);
    }
    if (wait) {
      DCHECK(maybe_barrier.has_value());
      Barrier* barrier = std::addressof(*maybe_barrier);
      for (VisiblyInitializedCallback& cb : running_visibly_initialized_callbacks_) {
        cb.AddBarrier(barrier);
        ++wait_count;
      }
    }
  }
  if (callback != nullptr) {
    callback->MakeVisible(self);
  }
  if (wait_count != 0) {
    DCHECK(maybe_barrier.has_value());
    maybe_barrier->Increment(self, wait_count);
  }
}

void ClassLinker::VisiblyInitializedCallbackDone(Thread* self,
                                                 VisiblyInitializedCallback* callback) {
  MutexLock lock(self, visibly_initialized_callback_lock_);
  // Pass the barriers if requested.
  for (Barrier* barrier : callback->GetAndClearBarriers()) {
    barrier->Pass(self);
  }
  // Remove the callback from the list of running callbacks.
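  // `std::forward_list<>` is singly-linked and only supports `erase_after()`,
  // so walk the list to find the position just before `callback`.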
  auto before = running_visibly_initialized_callbacks_.before_begin();
  auto it = running_visibly_initialized_callbacks_.begin();
  DCHECK(it != running_visibly_initialized_callbacks_.end());
  while (std::addressof(*it) != callback) {
    before = it;
    ++it;
    DCHECK(it != running_visibly_initialized_callbacks_.end());
  }
  running_visibly_initialized_callbacks_.erase_after(before);
  // Reuse or destroy the callback object.
  if (visibly_initialized_callback_ == nullptr) {
    visibly_initialized_callback_.reset(callback);
  } else {
    delete callback;
  }
}

void ClassLinker::ForceClassInitialized(Thread* self, Handle<mirror::Class> klass) {
  ClassLinker::VisiblyInitializedCallback* cb = MarkClassInitialized(self, klass);
  if (cb != nullptr) {
    cb->MakeVisible(self);
  }
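  // Waiting on the barrier requires that the mutator lock is not held (asserted
  // in `MakeInitializedClassesVisiblyInitialized()`), so suspend first.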
  ScopedThreadSuspension sts(self, ThreadState::kSuspended);
  MakeInitializedClassesVisiblyInitialized(self, /*wait=*/true);
}

const void* ClassLinker::FindBootJniStub(JniStubKey key) {
  auto it = boot_image_jni_stubs_.find(key);
  if (it == boot_image_jni_stubs_.end()) {
    return nullptr;
  } else {
    return it->second;
  }
}

ClassLinker::VisiblyInitializedCallback* ClassLinker::MarkClassInitialized(
    Thread* self, Handle<mirror::Class> klass) {
  if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
    // Thanks to the x86 memory model, we do not need any memory fences and
    // we can immediately mark the class as visibly initialized.
    mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
    FixupStaticTrampolines(self, klass.Get());
    return nullptr;
  }
  if (Runtime::Current()->IsActiveTransaction()) {
    // Transactions are single-threaded, so we can mark the class as visibly initialized.
    // (Otherwise we'd need to track the callback's entry in the transaction for rollback.)
    mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
    FixupStaticTrampolines(self, klass.Get());
    return nullptr;
  }
  mirror::Class::SetStatus(klass, ClassStatus::kInitialized, self);
  MutexLock lock(self, visibly_initialized_callback_lock_);
  if (visibly_initialized_callback_ == nullptr) {
    visibly_initialized_callback_.reset(new VisiblyInitializedCallback(this));
  }
  DCHECK(!visibly_initialized_callback_->IsFull());
  visibly_initialized_callback_->AddClass(self, klass.Get());

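  // Once the batch is full, hand the callback over to the caller, which is
  // expected to run `MakeVisible()` on it; otherwise keep accumulating classes
  // in the pending callback.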
  if (visibly_initialized_callback_->IsFull()) {
    VisiblyInitializedCallback* callback = visibly_initialized_callback_.release();
    running_visibly_initialized_callbacks_.push_front(*callback);
    return callback;
  } else {
    return nullptr;
  }
}

const void* ClassLinker::RegisterNative(
    Thread* self, ArtMethod* method, const void* native_method) {
  CHECK(method->IsNative()) << method->PrettyMethod();
  CHECK(native_method != nullptr) << method->PrettyMethod();
  void* new_native_method = nullptr;
  Runtime* runtime = Runtime::Current();
  runtime->GetRuntimeCallbacks()->RegisterNativeMethod(method,
                                                       native_method,
                                                       /*out*/&new_native_method);
  if (method->IsCriticalNative()) {
    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
    // Remove old registered method if any.
    auto it = critical_native_code_with_clinit_check_.find(method);
    if (it != critical_native_code_with_clinit_check_.end()) {
      critical_native_code_with_clinit_check_.erase(it);
    }
    // To ensure correct memory visibility, we need the class to be visibly
    // initialized before we can set the JNI entrypoint.
    if (method->GetDeclaringClass()->IsVisiblyInitialized()) {
      method->SetEntryPointFromJni(new_native_method);
    } else {
      critical_native_code_with_clinit_check_.emplace(method, new_native_method);
    }
  } else {
    method->SetEntryPointFromJni(new_native_method);
  }
  return new_native_method;
}

void ClassLinker::UnregisterNative(Thread* self, ArtMethod* method) {
  CHECK(method->IsNative()) << method->PrettyMethod();
  // Restore stub to lookup native pointer via dlsym.
  if (method->IsCriticalNative()) {
    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
    auto it = critical_native_code_with_clinit_check_.find(method);
    if (it != critical_native_code_with_clinit_check_.end()) {
      critical_native_code_with_clinit_check_.erase(it);
    }
    method->SetEntryPointFromJni(GetJniDlsymLookupCriticalStub());
  } else {
    method->SetEntryPointFromJni(GetJniDlsymLookupStub());
  }
}

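// Returns the native code registered for the method, or null if only the dlsym
// lookup stub is installed (i.e. nothing was registered or it was unregistered).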
const void* ClassLinker::GetRegisteredNative(Thread* self, ArtMethod* method) {
  if (method->IsCriticalNative()) {
    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
    auto it = critical_native_code_with_clinit_check_.find(method);
    if (it != critical_native_code_with_clinit_check_.end()) {
      return it->second;
    }
    const void* native_code = method->GetEntryPointFromJni();
    return IsJniDlsymLookupCriticalStub(native_code) ? nullptr : native_code;
  } else {
    const void* native_code = method->GetEntryPointFromJni();
    return IsJniDlsymLookupStub(native_code) ? nullptr : native_code;
  }
}

void ClassLinker::ThrowEarlierClassFailure(ObjPtr<mirror::Class> c,
                                           bool wrap_in_no_class_def,
                                           bool log) {
  // The class failed to initialize on a previous attempt, so we want to throw
  // a NoClassDefFoundError (v2 2.17.5). The exception to this rule is if we
  // failed in verification, in which case v2 5.4.1 says we need to re-throw
  // the previous error.
  Runtime* const runtime = Runtime::Current();
  if (!runtime->IsAotCompiler()) {  // Give info if this occurs at runtime.
    std::string extra;
    ObjPtr<mirror::Object> verify_error = GetErroneousStateError(c);
    if (verify_error != nullptr) {
      DCHECK(!verify_error->IsClass());
      extra = verify_error->AsThrowable()->Dump();
    }
    if (log) {
      LOG(INFO) << "Rejecting re-init on previously-failed class " << c->PrettyClass()
                << ": " << extra;
    }
  }

  CHECK(c->IsErroneous()) << c->PrettyClass() << " " << c->GetStatus();
  Thread* self = Thread::Current();
  if (runtime->IsAotCompiler()) {
    // At compile time, accurate errors and NCDFE are disabled to speed compilation.
    ObjPtr<mirror::Throwable> pre_allocated = runtime->GetPreAllocatedNoClassDefFoundError();
    self->SetException(pre_allocated);
  } else {
    ObjPtr<mirror::Object> erroneous_state_error = GetErroneousStateError(c);
    if (erroneous_state_error != nullptr) {
      // Rethrow stored error.
      HandleEarlierErroneousStateError(self, this, c);
    }
    // TODO This might be wrong if we hit an OOME while allocating the ClassExt. In that case we
    // might have meant to go down the earlier if statement with the original error but it got
    // swallowed by the OOM so we end up here.
    if (erroneous_state_error == nullptr ||
        (wrap_in_no_class_def && !IsVerifyError(erroneous_state_error))) {
      // If there isn't a recorded earlier error, or this is a repeat throw from initialization,
      // the top-level exception must be a NoClassDefFoundError. The potentially already pending
      // exception will be a cause.
      self->ThrowNewWrappedException("Ljava/lang/NoClassDefFoundError;",
                                     c->PrettyDescriptor().c_str());
    }
  }
}

static void VlogClassInitializationFailure(Handle<mirror::Class> klass)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (VLOG_IS_ON(class_linker)) {
    std::string temp;
    LOG(INFO) << "Failed to initialize class " << klass->GetDescriptor(&temp) << " from "
              << klass->GetLocation() << "\n" << Thread::Current()->GetException()->Dump();
  }
}

static void WrapExceptionInInitializer(Handle<mirror::Class> klass)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  Thread* self = Thread::Current();

  ObjPtr<mirror::Throwable> cause = self->GetException();
  CHECK(cause != nullptr);

  // Boot classpath classes should not fail initialization. This is a consistency debug check.
  // This cannot in general be guaranteed, but in all likelihood leads to breakage down the line.
  if (klass->GetClassLoader() == nullptr && !Runtime::Current()->IsAotCompiler()) {
    std::string tmp;
    // We want to LOG(FATAL) on debug builds since this really shouldn't be happening but we need
    // to make sure to only do it if we don't have AsyncExceptions being thrown around since those
    // could have caused the error.
    bool known_impossible = kIsDebugBuild && !Runtime::Current()->AreAsyncExceptionsThrown();
    LOG(known_impossible ? FATAL : WARNING) << klass->GetDescriptor(&tmp)
                                            << " failed initialization: "
                                            << self->GetException()->Dump();
  }

  // We only wrap non-Error exceptions; an Error can just be used as-is.
  if (!cause->IsError()) {
    self->ThrowNewWrappedException("Ljava/lang/ExceptionInInitializerError;", nullptr);
  }
  VlogClassInitializationFailure(klass);
}

static bool RegisterMemBarrierForClassInitialization() {
  if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
    // Thanks to the x86 memory model, classes skip the initialized status, so there is no need
    // to use `membarrier()` or other synchronization for marking classes visibly initialized.
    return false;
  }
  int membarrier_result = art::membarrier(MembarrierCommand::kRegisterPrivateExpedited);
  return membarrier_result == 0;
}

ClassLinker::ClassLinker(InternTable* intern_table, bool fast_class_not_found_exceptions)
    : boot_class_table_(new ClassTable()),
      failed_dex_cache_class_lookups_(0),
      class_roots_(nullptr),
      find_array_class_cache_next_victim_(0),
      init_done_(false),
      log_new_roots_(false),
      intern_table_(intern_table),
      fast_class_not_found_exceptions_(fast_class_not_found_exceptions),
      jni_dlsym_lookup_trampoline_(nullptr),
      jni_dlsym_lookup_critical_trampoline_(nullptr),
      quick_resolution_trampoline_(nullptr),
      quick_imt_conflict_trampoline_(nullptr),
      quick_generic_jni_trampoline_(nullptr),
      quick_to_interpreter_bridge_trampoline_(nullptr),
      nterp_trampoline_(nullptr),
      image_pointer_size_(kRuntimePointerSize),
      visibly_initialized_callback_lock_("visibly initialized callback lock"),
      visibly_initialized_callback_(nullptr),
      running_visibly_initialized_callbacks_(),
      visibly_initialize_classes_with_membarier_(RegisterMemBarrierForClassInitialization()),
      critical_native_code_with_clinit_check_lock_("critical native code with clinit check lock"),
      critical_native_code_with_clinit_check_(),
      boot_image_jni_stubs_(JniStubKeyHash(Runtime::Current()->GetInstructionSet()),
                            JniStubKeyEquals(Runtime::Current()->GetInstructionSet())),
      cha_(Runtime::Current()->IsAotCompiler() ? nullptr : new ClassHierarchyAnalysis()) {
  // For CHA disabled during Aot, see b/34193647.

  CHECK(intern_table_ != nullptr);
  static_assert(kFindArrayCacheSize == arraysize(find_array_class_cache_),
                "Array cache size wrong.");
  std::fill_n(find_array_class_cache_, kFindArrayCacheSize, GcRoot<mirror::Class>(nullptr));
}

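// Debug check used during bootstrap: the class allocated by hand early in
// `InitWithoutImage()` must match the class found by descriptor once
// `FindSystemClass()` is usable.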
void ClassLinker::CheckSystemClass(Thread* self, Handle<mirror::Class> c1, const char* descriptor) {
  ObjPtr<mirror::Class> c2 = FindSystemClass(self, descriptor);
  if (c2 == nullptr) {
    LOG(FATAL) << "Could not find class " << descriptor;
    UNREACHABLE();
  }
  if (c1.Get() != c2) {
    std::ostringstream os1, os2;
    c1->DumpClass(os1, mirror::Class::kDumpClassFullDetail);
    c2->DumpClass(os2, mirror::Class::kDumpClassFullDetail);
    LOG(FATAL) << "InitWithoutImage: Class mismatch for " << descriptor
               << ". This is most likely the result of a broken build. Make sure that "
               << "libcore and art projects match.\n\n"
               << os1.str() << "\n\n" << os2.str();
    UNREACHABLE();
  }
}

ObjPtr<mirror::IfTable> AllocIfTable(Thread* self,
                                     size_t ifcount,
                                     ObjPtr<mirror::Class> iftable_class)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(iftable_class->IsArrayClass());
  DCHECK(iftable_class->GetComponentType()->IsObjectClass());
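  // An iftable is a flat `Object[]` where each interface entry occupies
  // `IfTable::kMax` consecutive slots (the interface class and its method
  // array), hence the multiplication below.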
  return ObjPtr<mirror::IfTable>::DownCast(ObjPtr<mirror::ObjectArray<mirror::Object>>(
      mirror::IfTable::Alloc(self, iftable_class, ifcount * mirror::IfTable::kMax)));
}

bool ClassLinker::InitWithoutImage(std::vector<std::unique_ptr<const DexFile>> boot_class_path,
                                   std::string* error_msg) {
  VLOG(startup) << "ClassLinker::Init";

  Thread* const self = Thread::Current();
  Runtime* const runtime = Runtime::Current();
  gc::Heap* const heap = runtime->GetHeap();

  CHECK(!heap->HasBootImageSpace()) << "Runtime has image. We should use it.";
  CHECK(!init_done_);

  // Use the pointer size from the runtime since we are probably creating the image.
  image_pointer_size_ = InstructionSetPointerSize(runtime->GetInstructionSet());

  // java_lang_Class comes first; it's needed for AllocClass.
  // The GC can't handle an object with a null class since we can't get the size of this object.
  heap->IncrementDisableMovingGC(self);
  StackHandleScope<64> hs(self);  // 64 is picked arbitrarily.
  auto class_class_size = mirror::Class::ClassClassSize(image_pointer_size_);
  // Allocate the object as non-movable so that there are no cases where Object::IsClass returns
  // the incorrect result when comparing to-space vs from-space.
  Handle<mirror::Class> java_lang_Class(hs.NewHandle(ObjPtr<mirror::Class>::DownCast(
      heap->AllocNonMovableObject(self, nullptr, class_class_size, VoidFunctor()))));
  CHECK(java_lang_Class != nullptr);
  java_lang_Class->SetClassFlags(mirror::kClassFlagClass);
  java_lang_Class->SetClass(java_lang_Class.Get());
  if (kUseBakerReadBarrier) {
    java_lang_Class->AssertReadBarrierState();
  }
  java_lang_Class->SetClassSize(class_class_size);
  java_lang_Class->SetPrimitiveType(Primitive::kPrimNot);
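  // Note: `kPrimNot` marks java.lang.Class itself as a reference (non-primitive) type.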
  heap->DecrementDisableMovingGC(self);
  // AllocClass(ObjPtr<mirror::Class>) can now be used.

  // Class[] is used for reflection support.
  auto class_array_class_size = mirror::ObjectArray<mirror::Class>::ClassSize(image_pointer_size_);
  Handle<mirror::Class> class_array_class(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(), class_array_class_size)));
  class_array_class->SetComponentType(java_lang_Class.Get());

  // java_lang_Object comes next so that object_array_class can be created.
  Handle<mirror::Class> java_lang_Object(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(), mirror::Object::ClassSize(image_pointer_size_))));
  CHECK(java_lang_Object != nullptr);
  // Backfill Object as the super class of Class.
  java_lang_Class->SetSuperClass(java_lang_Object.Get());
  mirror::Class::SetStatus(java_lang_Object, ClassStatus::kLoaded, self);

  java_lang_Object->SetObjectSize(sizeof(mirror::Object));
  // Allocate in non-movable so that it's possible to check if a JNI weak global ref has been
  // cleared without triggering the read barrier and unintentionally marking the sentinel alive.
  runtime->SetSentinel(heap->AllocNonMovableObject(self,
                                                   java_lang_Object.Get(),
                                                   java_lang_Object->GetObjectSize(),
                                                   VoidFunctor()));

  // Initialize the SubtypeCheck bitstring for java.lang.Object and java.lang.Class.
  if (kBitstringSubtypeCheckEnabled) {
    // It might seem the lock here is unnecessary, however all the SubtypeCheck
    // functions are annotated to require locks all the way down.
    //
    // We take the lock here to avoid using NO_THREAD_SAFETY_ANALYSIS.
    MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
    SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(java_lang_Object.Get());
    SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(java_lang_Class.Get());
  }

  // Object[] next to hold class roots.
  Handle<mirror::Class> object_array_class(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(),
                 mirror::ObjectArray<mirror::Object>::ClassSize(image_pointer_size_))));
  object_array_class->SetComponentType(java_lang_Object.Get());

  // Setup java.lang.String.
  //
  // We make this class non-movable for the unlikely case where it were to be
  // moved by a sticky-bit (minor) collection when using the Generational
  // Concurrent Copying (CC) collector, potentially creating a stale reference
  // in the `klass_` field of one of its instances allocated in the Large-Object
  // Space (LOS) -- see the comment about the dirty card scanning logic in
  // art::gc::collector::ConcurrentCopying::MarkingPhase.
  Handle<mirror::Class> java_lang_String(hs.NewHandle(
      AllocClass</* kMovable= */ false>(
          self, java_lang_Class.Get(), mirror::String::ClassSize(image_pointer_size_))));
  java_lang_String->SetStringClass();
  mirror::Class::SetStatus(java_lang_String, ClassStatus::kResolved, self);

  // Setup java.lang.ref.Reference.
  Handle<mirror::Class> java_lang_ref_Reference(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(), mirror::Reference::ClassSize(image_pointer_size_))));
  java_lang_ref_Reference->SetObjectSize(mirror::Reference::InstanceSize());
  mirror::Class::SetStatus(java_lang_ref_Reference, ClassStatus::kResolved, self);

  // Create storage for root classes, save away our work so far (requires descriptors).
  class_roots_ = GcRoot<mirror::ObjectArray<mirror::Class>>(
      mirror::ObjectArray<mirror::Class>::Alloc(self,
                                                object_array_class.Get(),
                                                static_cast<int32_t>(ClassRoot::kMax)));
  CHECK(!class_roots_.IsNull());
  SetClassRoot(ClassRoot::kJavaLangClass, java_lang_Class.Get());
  SetClassRoot(ClassRoot::kJavaLangObject, java_lang_Object.Get());
  SetClassRoot(ClassRoot::kClassArrayClass, class_array_class.Get());
  SetClassRoot(ClassRoot::kObjectArrayClass, object_array_class.Get());
  SetClassRoot(ClassRoot::kJavaLangString, java_lang_String.Get());
  SetClassRoot(ClassRoot::kJavaLangRefReference, java_lang_ref_Reference.Get());

  // Fill in the empty iftable. Needs to be done after the kObjectArrayClass root is set.
  java_lang_Object->SetIfTable(AllocIfTable(self, 0, object_array_class.Get()));

  // Create array interface entries to populate once we can load system classes.
  object_array_class->SetIfTable(AllocIfTable(self, 2, object_array_class.Get()));
  DCHECK_EQ(GetArrayIfTable(), object_array_class->GetIfTable());

  // Setup the primitive type classes.
  CreatePrimitiveClass(self, Primitive::kPrimBoolean, ClassRoot::kPrimitiveBoolean);
  CreatePrimitiveClass(self, Primitive::kPrimByte, ClassRoot::kPrimitiveByte);
  CreatePrimitiveClass(self, Primitive::kPrimChar, ClassRoot::kPrimitiveChar);
  CreatePrimitiveClass(self, Primitive::kPrimShort, ClassRoot::kPrimitiveShort);
  CreatePrimitiveClass(self, Primitive::kPrimInt, ClassRoot::kPrimitiveInt);
  CreatePrimitiveClass(self, Primitive::kPrimLong, ClassRoot::kPrimitiveLong);
  CreatePrimitiveClass(self, Primitive::kPrimFloat, ClassRoot::kPrimitiveFloat);
  CreatePrimitiveClass(self, Primitive::kPrimDouble, ClassRoot::kPrimitiveDouble);
  CreatePrimitiveClass(self, Primitive::kPrimVoid, ClassRoot::kPrimitiveVoid);

  // Allocate the primitive array classes. We need only the native pointer
  // array at this point (int[] or long[], depending on architecture) but
  // we shall perform the same setup steps for all primitive array classes.
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveBoolean, ClassRoot::kBooleanArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveByte, ClassRoot::kByteArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveChar, ClassRoot::kCharArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveShort, ClassRoot::kShortArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveInt, ClassRoot::kIntArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveLong, ClassRoot::kLongArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveFloat, ClassRoot::kFloatArrayClass);
  AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveDouble, ClassRoot::kDoubleArrayClass);

  // Now that these are registered, we can use AllocClass() and AllocObjectArray.

  // Set up DexCache. This cannot be done later since AppendToBootClassPath calls AllocDexCache.
  Handle<mirror::Class> java_lang_DexCache(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(), mirror::DexCache::ClassSize(image_pointer_size_))));
  SetClassRoot(ClassRoot::kJavaLangDexCache, java_lang_DexCache.Get());
  java_lang_DexCache->SetDexCacheClass();
  java_lang_DexCache->SetObjectSize(mirror::DexCache::InstanceSize());
  mirror::Class::SetStatus(java_lang_DexCache, ClassStatus::kResolved, self);

  // Setup dalvik.system.ClassExt.
  Handle<mirror::Class> dalvik_system_ClassExt(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(), mirror::ClassExt::ClassSize(image_pointer_size_))));
  SetClassRoot(ClassRoot::kDalvikSystemClassExt, dalvik_system_ClassExt.Get());
  mirror::Class::SetStatus(dalvik_system_ClassExt, ClassStatus::kResolved, self);

  // Set up array classes for string, field, method.
  Handle<mirror::Class> object_array_string(hs.NewHandle(
      AllocClass(self, java_lang_Class.Get(),
                 mirror::ObjectArray<mirror::String>::ClassSize(image_pointer_size_))));
  object_array_string->SetComponentType(java_lang_String.Get());
  SetClassRoot(ClassRoot::kJavaLangStringArrayClass, object_array_string.Get());

  LinearAlloc* linear_alloc = runtime->GetLinearAlloc();
  // Create runtime resolution and imt conflict methods.
  runtime->SetResolutionMethod(runtime->CreateResolutionMethod());
  runtime->SetImtConflictMethod(runtime->CreateImtConflictMethod(linear_alloc));
  runtime->SetImtUnimplementedMethod(runtime->CreateImtConflictMethod(linear_alloc));

  // Setup boot_class_path_ and register class_path now that we can use AllocObjectArray to create
  // DexCache instances. Needs to be after String, Field, Method arrays since AllocDexCache uses
  // these roots.
  if (boot_class_path.empty()) {
    *error_msg = "Boot classpath is empty.";
    return false;
  }
  for (auto& dex_file : boot_class_path) {
    if (dex_file == nullptr) {
      *error_msg = "Null dex file.";
      return false;
    }
    AppendToBootClassPath(self, dex_file.get());
    boot_dex_files_.push_back(std::move(dex_file));
  }

  // Now we can use FindSystemClass.

  // Set up GenericJNI entrypoint. That is mainly a hack for common_compiler_test.h so that
  // we do not need friend classes or a publicly exposed setter.
  quick_generic_jni_trampoline_ = GetQuickGenericJniStub();
  if (!runtime->IsAotCompiler()) {
    // We need to set up the generic trampolines since we don't have an image.
    jni_dlsym_lookup_trampoline_ = GetJniDlsymLookupStub();
    jni_dlsym_lookup_critical_trampoline_ = GetJniDlsymLookupCriticalStub();
    quick_resolution_trampoline_ = GetQuickResolutionStub();
    quick_imt_conflict_trampoline_ = GetQuickImtConflictStub();
    quick_generic_jni_trampoline_ = GetQuickGenericJniStub();
    quick_to_interpreter_bridge_trampoline_ = GetQuickToInterpreterBridge();
    nterp_trampoline_ = interpreter::GetNterpEntryPoint();
  }

  // Object, String, ClassExt and DexCache need to be rerun through FindSystemClass to finish init.
  // We also need to immediately clear the finalizable flag for Object so that other classes are
  // not erroneously marked as finalizable. (Object defines an empty finalizer, so that other
  // classes can override it but it is not itself finalizable.)
  mirror::Class::SetStatus(java_lang_Object, ClassStatus::kNotReady, self);
  CheckSystemClass(self, java_lang_Object, "Ljava/lang/Object;");
  CHECK(java_lang_Object->IsFinalizable());
  java_lang_Object->ClearFinalizable();
  CHECK_EQ(java_lang_Object->GetObjectSize(), mirror::Object::InstanceSize());
  mirror::Class::SetStatus(java_lang_String, ClassStatus::kNotReady, self);
  CheckSystemClass(self, java_lang_String, "Ljava/lang/String;");
  mirror::Class::SetStatus(java_lang_DexCache, ClassStatus::kNotReady, self);
  CheckSystemClass(self, java_lang_DexCache, "Ljava/lang/DexCache;");
  CHECK_EQ(java_lang_DexCache->GetObjectSize(), mirror::DexCache::InstanceSize());
  mirror::Class::SetStatus(dalvik_system_ClassExt, ClassStatus::kNotReady, self);
  CheckSystemClass(self, dalvik_system_ClassExt, "Ldalvik/system/ClassExt;");
  CHECK_EQ(dalvik_system_ClassExt->GetObjectSize(), mirror::ClassExt::InstanceSize());

  // Run Class through FindSystemClass. This initializes the dex_cache_ fields and registers it
  // in class_table_.
  CheckSystemClass(self, java_lang_Class, "Ljava/lang/Class;");

  // Setup core array classes, i.e. Object[], String[] and Class[] and primitive
  // arrays - can't be done until Object has a vtable and component classes are loaded.
  FinishCoreArrayClassSetup(ClassRoot::kObjectArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kClassArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kJavaLangStringArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kBooleanArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kByteArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kCharArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kShortArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kIntArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kLongArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kFloatArrayClass);
  FinishCoreArrayClassSetup(ClassRoot::kDoubleArrayClass);

  // Setup the single, global copy of "iftable".
  auto java_lang_Cloneable = hs.NewHandle(FindSystemClass(self, "Ljava/lang/Cloneable;"));
  CHECK(java_lang_Cloneable != nullptr);
  auto java_io_Serializable = hs.NewHandle(FindSystemClass(self, "Ljava/io/Serializable;"));
  CHECK(java_io_Serializable != nullptr);
  // We assume that Cloneable/Serializable don't have superinterfaces -- normally we'd have to
  // crawl up and explicitly list all of the supers as well.
  object_array_class->GetIfTable()->SetInterface(0, java_lang_Cloneable.Get());
  object_array_class->GetIfTable()->SetInterface(1, java_io_Serializable.Get());

  // Check Class[] and Object[]'s interfaces.
  CHECK_EQ(java_lang_Cloneable.Get(), class_array_class->GetDirectInterface(0));
  CHECK_EQ(java_io_Serializable.Get(), class_array_class->GetDirectInterface(1));
  CHECK_EQ(java_lang_Cloneable.Get(), object_array_class->GetDirectInterface(0));
  CHECK_EQ(java_io_Serializable.Get(), object_array_class->GetDirectInterface(1));

  CHECK_EQ(object_array_string.Get(),
           FindSystemClass(self, GetClassRootDescriptor(ClassRoot::kJavaLangStringArrayClass)));

  // The Enum class declares a "final" finalize() method to prevent subclasses from introducing
  // a finalizer but it is not itself considered finalizable. Load the Enum class now and clear
  // the finalizable flag to prevent subclasses from being marked as finalizable.
  CHECK_EQ(LookupClass(self, "Ljava/lang/Enum;", /*class_loader=*/ nullptr), nullptr);
  Handle<mirror::Class> java_lang_Enum = hs.NewHandle(FindSystemClass(self, "Ljava/lang/Enum;"));
  CHECK(java_lang_Enum->IsFinalizable());
  java_lang_Enum->ClearFinalizable();

  // End of special init trickery, all subsequent classes may be loaded via FindSystemClass.

  // Create java.lang.reflect.Proxy root.
  SetClassRoot(ClassRoot::kJavaLangReflectProxy,
               FindSystemClass(self, "Ljava/lang/reflect/Proxy;"));

  // Create java.lang.reflect.Field.class root.
  ObjPtr<mirror::Class> class_root = FindSystemClass(self, "Ljava/lang/reflect/Field;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangReflectField, class_root);

  // Create java.lang.reflect.Field array root.
  class_root = FindSystemClass(self, "[Ljava/lang/reflect/Field;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangReflectFieldArrayClass, class_root);

  // Create java.lang.reflect.Constructor.class root and array root.
  class_root = FindSystemClass(self, "Ljava/lang/reflect/Constructor;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangReflectConstructor, class_root);
  class_root = FindSystemClass(self, "[Ljava/lang/reflect/Constructor;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangReflectConstructorArrayClass, class_root);

  // Create java.lang.reflect.Method.class root and array root.
  class_root = FindSystemClass(self, "Ljava/lang/reflect/Method;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangReflectMethod, class_root);
  class_root = FindSystemClass(self, "[Ljava/lang/reflect/Method;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangReflectMethodArrayClass, class_root);

  // Create java.lang.invoke.CallSite.class root.
  class_root = FindSystemClass(self, "Ljava/lang/invoke/CallSite;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeCallSite, class_root);

  // Create java.lang.invoke.MethodType.class root.
  class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodType;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeMethodType, class_root);

  // Create java.lang.invoke.MethodHandleImpl.class root.
  class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodHandleImpl;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandleImpl, class_root);
  SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandle, class_root->GetSuperClass());

  // Create java.lang.invoke.MethodHandles.Lookup.class root.
  class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodHandles$Lookup;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandlesLookup, class_root);

  // Create java.lang.invoke.VarHandle.class root.
  class_root = FindSystemClass(self, "Ljava/lang/invoke/VarHandle;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeVarHandle, class_root);

  // Create java.lang.invoke.FieldVarHandle.class root.
  class_root = FindSystemClass(self, "Ljava/lang/invoke/FieldVarHandle;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeFieldVarHandle, class_root);

  // Create java.lang.invoke.StaticFieldVarHandle.class root.
  class_root = FindSystemClass(self, "Ljava/lang/invoke/StaticFieldVarHandle;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeStaticFieldVarHandle, class_root);

  // Create java.lang.invoke.ArrayElementVarHandle.class root.
  class_root = FindSystemClass(self, "Ljava/lang/invoke/ArrayElementVarHandle;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeArrayElementVarHandle, class_root);

  // Create java.lang.invoke.ByteArrayViewVarHandle.class root.
  class_root = FindSystemClass(self, "Ljava/lang/invoke/ByteArrayViewVarHandle;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeByteArrayViewVarHandle, class_root);

  // Create java.lang.invoke.ByteBufferViewVarHandle.class root.
  class_root = FindSystemClass(self, "Ljava/lang/invoke/ByteBufferViewVarHandle;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kJavaLangInvokeByteBufferViewVarHandle, class_root);

  class_root = FindSystemClass(self, "Ldalvik/system/EmulatedStackFrame;");
  CHECK(class_root != nullptr);
  SetClassRoot(ClassRoot::kDalvikSystemEmulatedStackFrame, class_root);

  // java.lang.ref classes need to be specially flagged, but otherwise are normal classes.
  // Finish initializing the Reference class.
  mirror::Class::SetStatus(java_lang_ref_Reference, ClassStatus::kNotReady, self);
  CheckSystemClass(self, java_lang_ref_Reference, "Ljava/lang/ref/Reference;");
  CHECK_EQ(java_lang_ref_Reference->GetObjectSize(), mirror::Reference::InstanceSize());
  CHECK_EQ(java_lang_ref_Reference->GetClassSize(),
           mirror::Reference::ClassSize(image_pointer_size_));
  class_root = FindSystemClass(self, "Ljava/lang/ref/FinalizerReference;");
  CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
  class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagFinalizerReference);
  class_root = FindSystemClass(self, "Ljava/lang/ref/PhantomReference;");
  CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
  class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagPhantomReference);
  class_root = FindSystemClass(self, "Ljava/lang/ref/SoftReference;");
  CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
  class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagSoftReference);
  class_root = FindSystemClass(self, "Ljava/lang/ref/WeakReference;");
  CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
  class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagWeakReference);

  // Setup the ClassLoader, verifying the object_size_.
  class_root = FindSystemClass(self, "Ljava/lang/ClassLoader;");
  class_root->SetClassLoaderClass();
  CHECK_EQ(class_root->GetObjectSize(), mirror::ClassLoader::InstanceSize());
  SetClassRoot(ClassRoot::kJavaLangClassLoader, class_root);

  // Set up java.lang.Throwable, java.lang.ClassNotFoundException, and
  // java.lang.StackTraceElement as a convenience.
  SetClassRoot(ClassRoot::kJavaLangThrowable, FindSystemClass(self, "Ljava/lang/Throwable;"));
  SetClassRoot(ClassRoot::kJavaLangClassNotFoundException,
               FindSystemClass(self, "Ljava/lang/ClassNotFoundException;"));
  SetClassRoot(ClassRoot::kJavaLangStackTraceElement,
               FindSystemClass(self, "Ljava/lang/StackTraceElement;"));
  SetClassRoot(ClassRoot::kJavaLangStackTraceElementArrayClass,
               FindSystemClass(self, "[Ljava/lang/StackTraceElement;"));
  SetClassRoot(ClassRoot::kJavaLangClassLoaderArrayClass,
               FindSystemClass(self, "[Ljava/lang/ClassLoader;"));

  // Create conflict tables that depend on the class linker.
  runtime->FixupConflictTables();

  FinishInit(self);

  VLOG(startup) << "ClassLinker::InitFromCompiler exiting";

  return true;
}

static void CreateStringInitBindings(Thread* self, ClassLinker* class_linker)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Find String.<init> -> StringFactory bindings.
  ObjPtr<mirror::Class> string_factory_class =
      class_linker->FindSystemClass(self, "Ljava/lang/StringFactory;");
  CHECK(string_factory_class != nullptr);
  ObjPtr<mirror::Class> string_class = GetClassRoot<mirror::String>(class_linker);
  WellKnownClasses::InitStringInit(string_class, string_factory_class);
  // Update the primordial thread.
  self->InitStringEntryPoints();
}

void ClassLinker::FinishInit(Thread* self) {
  VLOG(startup) << "ClassLinker::FinishInit entering";

  CreateStringInitBindings(self, this);

  // Let the heap know some key offsets into java.lang.ref instances.
  // Note: we hard code the field indexes here rather than using FindInstanceField
  // as the types of the field can't be resolved prior to the runtime being
  // fully initialized.
  StackHandleScope<3> hs(self);
  Handle<mirror::Class> java_lang_ref_Reference =
      hs.NewHandle(GetClassRoot<mirror::Reference>(this));
  Handle<mirror::Class> java_lang_ref_FinalizerReference =
      hs.NewHandle(FindSystemClass(self, "Ljava/lang/ref/FinalizerReference;"));

  ArtField* pendingNext = java_lang_ref_Reference->GetInstanceField(0);
  CHECK_STREQ(pendingNext->GetName(), "pendingNext");
  CHECK_STREQ(pendingNext->GetTypeDescriptor(), "Ljava/lang/ref/Reference;");

  ArtField* queue = java_lang_ref_Reference->GetInstanceField(1);
  CHECK_STREQ(queue->GetName(), "queue");
  CHECK_STREQ(queue->GetTypeDescriptor(), "Ljava/lang/ref/ReferenceQueue;");

  ArtField* queueNext = java_lang_ref_Reference->GetInstanceField(2);
  CHECK_STREQ(queueNext->GetName(), "queueNext");
  CHECK_STREQ(queueNext->GetTypeDescriptor(), "Ljava/lang/ref/Reference;");

  ArtField* referent = java_lang_ref_Reference->GetInstanceField(3);
  CHECK_STREQ(referent->GetName(), "referent");
  CHECK_STREQ(referent->GetTypeDescriptor(), "Ljava/lang/Object;");

  ArtField* zombie = java_lang_ref_FinalizerReference->GetInstanceField(2);
  CHECK_STREQ(zombie->GetName(), "zombie");
  CHECK_STREQ(zombie->GetTypeDescriptor(), "Ljava/lang/Object;");

  // Ensure all class_roots_ are initialized.
  for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); i++) {
    ClassRoot class_root = static_cast<ClassRoot>(i);
    ObjPtr<mirror::Class> klass = GetClassRoot(class_root);
    CHECK(klass != nullptr);
    DCHECK(klass->IsArrayClass() || klass->IsPrimitive() || klass->GetDexCache() != nullptr);
    // Note: SetClassRoot does additional validation.
    // If possible, add new checks there to catch errors early.
  }

  CHECK(GetArrayIfTable() != nullptr);

  // Disable the slow paths in FindClass and CreatePrimitiveClass now
  // that Object, Class, and Object[] are setup.
  init_done_ = true;

  // Under sanitization, the small carve-out to handle stack overflow might not be enough to
  // initialize the StackOverflowError class (as it might require running the verifier). Instead,
  // ensure that the class will be initialized.
  if (kMemoryToolIsAvailable && !Runtime::Current()->IsAotCompiler()) {
    ObjPtr<mirror::Class> soe_klass = FindSystemClass(self, "Ljava/lang/StackOverflowError;");
    if (soe_klass == nullptr || !EnsureInitialized(self, hs.NewHandle(soe_klass), true, true)) {
      // Strange, but don't crash.
      LOG(WARNING) << "Could not prepare StackOverflowError.";
      self->ClearException();
    }
  }

  VLOG(startup) << "ClassLinker::FinishInit exiting";
}

static void EnsureRootInitialized(ClassLinker* class_linker,
                                  Thread* self,
                                  ObjPtr<mirror::Class> klass)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (!klass->IsVisiblyInitialized()) {
    DCHECK(!klass->IsArrayClass());
    DCHECK(!klass->IsPrimitive());
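    // Array and primitive classes are implicitly initialized, so a class that is
    // not yet visibly initialized here must be a normal class.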
1149 StackHandleScope<1> hs(self);
1150 Handle<mirror::Class> h_class(hs.NewHandle(klass));
1151 if (!class_linker->EnsureInitialized(
1152 self, h_class, /*can_init_fields=*/ true, /*can_init_parents=*/ true)) {
1153 LOG(FATAL) << "Exception when initializing " << h_class->PrettyClass()
1154 << ": " << self->GetException()->Dump();
1155 }
1156 }
1157 }
1158
RunEarlyRootClinits(Thread * self)1159 void ClassLinker::RunEarlyRootClinits(Thread* self) {
1160 StackHandleScope<1u> hs(self);
1161 Handle<mirror::ObjectArray<mirror::Class>> class_roots = hs.NewHandle(GetClassRoots());
1162 EnsureRootInitialized(this, self, GetClassRoot<mirror::Class>(class_roots.Get()));
1163 EnsureRootInitialized(this, self, GetClassRoot<mirror::String>(class_roots.Get()));
1164 // `Field` class is needed for register_java_net_InetAddress in libcore, b/28153851.
1165 EnsureRootInitialized(this, self, GetClassRoot<mirror::Field>(class_roots.Get()));
1166
1167 WellKnownClasses::Init(self->GetJniEnv());
1168
1169 // `FinalizerReference` class is needed for initialization of `java.net.InetAddress`.
1170 // (Indirectly by constructing a `ObjectStreamField` which uses a `StringBuilder`
1171 // and, when resizing, initializes the `System` class for `System.arraycopy()`
1172 // and `System.<clinit> creates a finalizable object.)
1173 EnsureRootInitialized(
1174 this, self, WellKnownClasses::java_lang_ref_FinalizerReference_add->GetDeclaringClass());
1175 }
1176
RunRootClinits(Thread * self)1177 void ClassLinker::RunRootClinits(Thread* self) {
1178 StackHandleScope<1u> hs(self);
1179 Handle<mirror::ObjectArray<mirror::Class>> class_roots = hs.NewHandle(GetClassRoots());
1180 for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); ++i) {
1181 EnsureRootInitialized(this, self, GetClassRoot(ClassRoot(i), class_roots.Get()));
1182 }
1183
1184 // Make sure certain well-known classes are initialized. Note that well-known
1185 // classes are always in the boot image, so this code is primarily intended
1186 // for running without a boot image, but it may also be needed with a boot image if
1187 // AOT-initialization fails due to new code being introduced into `<clinit>`.
1188 ArtMethod* methods_of_classes_to_initialize[] = {
1189 // Initialize primitive boxing classes (avoid check at runtime).
1190 WellKnownClasses::java_lang_Boolean_valueOf,
1191 WellKnownClasses::java_lang_Byte_valueOf,
1192 WellKnownClasses::java_lang_Character_valueOf,
1193 WellKnownClasses::java_lang_Double_valueOf,
1194 WellKnownClasses::java_lang_Float_valueOf,
1195 WellKnownClasses::java_lang_Integer_valueOf,
1196 WellKnownClasses::java_lang_Long_valueOf,
1197 WellKnownClasses::java_lang_Short_valueOf,
1198 // Initialize `StackOverflowError`.
1199 WellKnownClasses::java_lang_StackOverflowError_init,
1200 // Ensure class loader classes are initialized (avoid check at runtime).
1201 // Superclass `ClassLoader` is a class root and already initialized above.
1202 // Superclass `BaseDexClassLoader` is initialized implicitly.
1203 WellKnownClasses::dalvik_system_DelegateLastClassLoader_init,
1204 WellKnownClasses::dalvik_system_DexClassLoader_init,
1205 WellKnownClasses::dalvik_system_InMemoryDexClassLoader_init,
1206 WellKnownClasses::dalvik_system_PathClassLoader_init,
1207 WellKnownClasses::java_lang_BootClassLoader_init,
1208 // Ensure `Daemons` class is initialized (avoid check at runtime).
1209 WellKnownClasses::java_lang_Daemons_start,
1210 // Ensure `Thread` and `ThreadGroup` classes are initialized (avoid check at runtime).
1211 WellKnownClasses::java_lang_Thread_init,
1212 WellKnownClasses::java_lang_ThreadGroup_add,
1213 // Ensure reference classes are initialized (avoid check at runtime).
1214 // The `FinalizerReference` class was initialized in `RunEarlyRootClinits()`.
1215 WellKnownClasses::java_lang_ref_ReferenceQueue_add,
1216 // Ensure `InvocationTargetException` class is initialized (avoid check at runtime).
1217 WellKnownClasses::java_lang_reflect_InvocationTargetException_init,
1218 // Ensure `Parameter` class is initialized (avoid check at runtime).
1219 WellKnownClasses::java_lang_reflect_Parameter_init,
1220 // Ensure `MethodHandles` and `MethodType` classes are initialized (avoid check at runtime).
1221 WellKnownClasses::java_lang_invoke_MethodHandles_lookup,
1222 WellKnownClasses::java_lang_invoke_MethodType_makeImpl,
1223 // Ensure `DirectByteBuffer` class is initialized (avoid check at runtime).
1224 WellKnownClasses::java_nio_DirectByteBuffer_init,
1225 // Ensure `FloatingDecimal` class is initialized (avoid check at runtime).
1226 WellKnownClasses::jdk_internal_math_FloatingDecimal_getBinaryToASCIIConverter_D,
1227 // Ensure reflection annotation classes are initialized (avoid check at runtime).
1228 WellKnownClasses::libcore_reflect_AnnotationFactory_createAnnotation,
1229 WellKnownClasses::libcore_reflect_AnnotationMember_init,
1230 // We're suppressing exceptions from `DdmServer` and we do not want to repeatedly
1231 // suppress class initialization error (say, due to OOM), so initialize it early.
1232 WellKnownClasses::org_apache_harmony_dalvik_ddmc_DdmServer_dispatch,
1233 };
1234 for (ArtMethod* method : methods_of_classes_to_initialize) {
1235 EnsureRootInitialized(this, self, method->GetDeclaringClass());
1236 }
1237 ArtField* fields_of_classes_to_initialize[] = {
1238 // Ensure classes used by class loaders are initialized (avoid check at runtime).
1239 WellKnownClasses::dalvik_system_DexFile_cookie,
1240 WellKnownClasses::dalvik_system_DexPathList_dexElements,
1241 WellKnownClasses::dalvik_system_DexPathList__Element_dexFile,
1242 // Ensure `VMRuntime` is initialized (avoid check at runtime).
1243 WellKnownClasses::dalvik_system_VMRuntime_nonSdkApiUsageConsumer,
1244 // Initialize empty arrays needed by `StackOverflowError`.
1245 WellKnownClasses::java_util_Collections_EMPTY_LIST,
1246 WellKnownClasses::libcore_util_EmptyArray_STACK_TRACE_ELEMENT,
1247 // Initialize boxing caches needed by the compiler.
1248 WellKnownClasses::java_lang_Byte_ByteCache_cache,
1249 WellKnownClasses::java_lang_Character_CharacterCache_cache,
1250 WellKnownClasses::java_lang_Integer_IntegerCache_cache,
1251 WellKnownClasses::java_lang_Long_LongCache_cache,
1252 WellKnownClasses::java_lang_Short_ShortCache_cache,
1253 };
1254 for (ArtField* field : fields_of_classes_to_initialize) {
1255 EnsureRootInitialized(this, self, field->GetDeclaringClass());
1256 }
1257 }
1258
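// Hashes a method's name using the modified-UTF-8 hash. Used to precompute the hashes
// of j.l.Object's virtual methods in InitializeObjectVirtualMethodHashes() below.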
1259 ALWAYS_INLINE
1260 static uint32_t ComputeMethodHash(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
1261 DCHECK(!method->IsRuntimeMethod());
1262 DCHECK(!method->IsProxyMethod());
1263 DCHECK(!method->IsObsolete());
1264 // Do not use `ArtMethod::GetNameView()` to avoid unnecessary runtime/proxy/obsolete method
1265 // checks. It is safe to avoid the read barrier here, see `ArtMethod::GetDexFile()`.
1266 const DexFile& dex_file = method->GetDeclaringClass<kWithoutReadBarrier>()->GetDexFile();
1267 const dex::MethodId& method_id = dex_file.GetMethodId(method->GetDexMethodIndex());
1268 std::string_view name = dex_file.GetMethodNameView(method_id);
1269 return ComputeModifiedUtf8Hash(name);
1270 }
1271
1272 ALWAYS_INLINE
1273 static bool MethodSignatureEquals(ArtMethod* lhs, ArtMethod* rhs)
1274 REQUIRES_SHARED(Locks::mutator_lock_) {
1275 DCHECK(!lhs->IsRuntimeMethod());
1276 DCHECK(!lhs->IsProxyMethod());
1277 DCHECK(!lhs->IsObsolete());
1278 DCHECK(!rhs->IsRuntimeMethod());
1279 DCHECK(!rhs->IsProxyMethod());
1280 DCHECK(!rhs->IsObsolete());
1281 // Do not use `ArtMethod::GetDexFile()` to avoid unnecessary obsolete method checks.
1282 // It is safe to avoid the read barrier here, see `ArtMethod::GetDexFile()`.
1283 const DexFile& lhs_dex_file = lhs->GetDeclaringClass<kWithoutReadBarrier>()->GetDexFile();
1284 const DexFile& rhs_dex_file = rhs->GetDeclaringClass<kWithoutReadBarrier>()->GetDexFile();
1285 const dex::MethodId& lhs_mid = lhs_dex_file.GetMethodId(lhs->GetDexMethodIndex());
1286 const dex::MethodId& rhs_mid = rhs_dex_file.GetMethodId(rhs->GetDexMethodIndex());
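// Fast path: a dex file contains no duplicate MethodIds, so within a single dex file
// comparing the name and proto indices is equivalent to comparing the strings.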
1287 if (&lhs_dex_file == &rhs_dex_file) {
1288 return lhs_mid.name_idx_ == rhs_mid.name_idx_ &&
1289 lhs_mid.proto_idx_ == rhs_mid.proto_idx_;
1290 } else {
1291 return
1292 lhs_dex_file.GetMethodNameView(lhs_mid) == rhs_dex_file.GetMethodNameView(rhs_mid) &&
1293 lhs_dex_file.GetMethodSignature(lhs_mid) == rhs_dex_file.GetMethodSignature(rhs_mid);
1294 }
1295 }
1296
1297 static void InitializeObjectVirtualMethodHashes(ObjPtr<mirror::Class> java_lang_Object,
1298 PointerSize pointer_size,
1299 /*out*/ ArrayRef<uint32_t> virtual_method_hashes)
1300 REQUIRES_SHARED(Locks::mutator_lock_) {
1301 ArraySlice<ArtMethod> virtual_methods = java_lang_Object->GetVirtualMethods(pointer_size);
1302 DCHECK_EQ(virtual_method_hashes.size(), virtual_methods.size());
1303 for (size_t i = 0; i != virtual_method_hashes.size(); ++i) {
1304 virtual_method_hashes[i] = ComputeMethodHash(&virtual_methods[i]);
1305 }
1306 }
1307
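// Plain data holder for the debug-build consistency check in InitFromBootImage() that
// verifies no image method uses another image's trampolines as its entrypoint.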
1308 struct TrampolineCheckData {
1309 const void* quick_resolution_trampoline;
1310 const void* quick_imt_conflict_trampoline;
1311 const void* quick_generic_jni_trampoline;
1312 const void* quick_to_interpreter_bridge_trampoline;
1313 const void* nterp_trampoline;
1314 PointerSize pointer_size;
1315 ArtMethod* m;
1316 bool error;
1317 };
1318
1319 bool ClassLinker::InitFromBootImage(std::string* error_msg) {
1320 VLOG(startup) << __FUNCTION__ << " entering";
1321 CHECK(!init_done_);
1322
1323 Runtime* const runtime = Runtime::Current();
1324 Thread* const self = Thread::Current();
1325 gc::Heap* const heap = runtime->GetHeap();
1326 std::vector<gc::space::ImageSpace*> spaces = heap->GetBootImageSpaces();
1327 CHECK(!spaces.empty());
1328 const ImageHeader& image_header = spaces[0]->GetImageHeader();
1329 image_pointer_size_ = image_header.GetPointerSize();
1330 if (UNLIKELY(image_pointer_size_ != PointerSize::k32 &&
1331 image_pointer_size_ != PointerSize::k64)) {
1332 *error_msg =
1333 StringPrintf("Invalid image pointer size: %u", static_cast<uint32_t>(image_pointer_size_));
1334 return false;
1335 }
1336 if (!runtime->IsAotCompiler()) {
1337 // Only the AOT compiler supports having an image with a different pointer size than the
1338 // runtime. This happens on the host when compiling 32-bit tests, since we use a 64-bit
1339 // libart compiler. We may also use a 32-bit dex2oat on a system with 64-bit apps.
1340 if (image_pointer_size_ != kRuntimePointerSize) {
1341 *error_msg = StringPrintf("Runtime must use current image pointer size: %zu vs %zu",
1342 static_cast<size_t>(image_pointer_size_),
1343 sizeof(void*));
1344 return false;
1345 }
1346 }
1347 DCHECK(!runtime->HasResolutionMethod());
1348 runtime->SetResolutionMethod(image_header.GetImageMethod(ImageHeader::kResolutionMethod));
1349 runtime->SetImtConflictMethod(image_header.GetImageMethod(ImageHeader::kImtConflictMethod));
1350 runtime->SetImtUnimplementedMethod(
1351 image_header.GetImageMethod(ImageHeader::kImtUnimplementedMethod));
1352 runtime->SetCalleeSaveMethod(
1353 image_header.GetImageMethod(ImageHeader::kSaveAllCalleeSavesMethod),
1354 CalleeSaveType::kSaveAllCalleeSaves);
1355 runtime->SetCalleeSaveMethod(
1356 image_header.GetImageMethod(ImageHeader::kSaveRefsOnlyMethod),
1357 CalleeSaveType::kSaveRefsOnly);
1358 runtime->SetCalleeSaveMethod(
1359 image_header.GetImageMethod(ImageHeader::kSaveRefsAndArgsMethod),
1360 CalleeSaveType::kSaveRefsAndArgs);
1361 runtime->SetCalleeSaveMethod(
1362 image_header.GetImageMethod(ImageHeader::kSaveEverythingMethod),
1363 CalleeSaveType::kSaveEverything);
1364 runtime->SetCalleeSaveMethod(
1365 image_header.GetImageMethod(ImageHeader::kSaveEverythingMethodForClinit),
1366 CalleeSaveType::kSaveEverythingForClinit);
1367 runtime->SetCalleeSaveMethod(
1368 image_header.GetImageMethod(ImageHeader::kSaveEverythingMethodForSuspendCheck),
1369 CalleeSaveType::kSaveEverythingForSuspendCheck);
1370
1371 std::vector<const OatFile*> oat_files =
1372 runtime->GetOatFileManager().RegisterImageOatFiles(spaces);
1373 DCHECK(!oat_files.empty());
1374 const OatHeader& default_oat_header = oat_files[0]->GetOatHeader();
1375 jni_dlsym_lookup_trampoline_ = default_oat_header.GetJniDlsymLookupTrampoline();
1376 jni_dlsym_lookup_critical_trampoline_ = default_oat_header.GetJniDlsymLookupCriticalTrampoline();
1377 quick_resolution_trampoline_ = default_oat_header.GetQuickResolutionTrampoline();
1378 quick_imt_conflict_trampoline_ = default_oat_header.GetQuickImtConflictTrampoline();
1379 quick_generic_jni_trampoline_ = default_oat_header.GetQuickGenericJniTrampoline();
1380 quick_to_interpreter_bridge_trampoline_ = default_oat_header.GetQuickToInterpreterBridge();
1381 nterp_trampoline_ = default_oat_header.GetNterpTrampoline();
1382 if (kIsDebugBuild) {
1383 // Check that the other images use the same trampolines.
1384 for (size_t i = 1; i < oat_files.size(); ++i) {
1385 const OatHeader& ith_oat_header = oat_files[i]->GetOatHeader();
1386 const void* ith_jni_dlsym_lookup_trampoline =
1387 ith_oat_header.GetJniDlsymLookupTrampoline();
1388 const void* ith_jni_dlsym_lookup_critical_trampoline =
1389 ith_oat_header.GetJniDlsymLookupCriticalTrampoline();
1390 const void* ith_quick_resolution_trampoline =
1391 ith_oat_header.GetQuickResolutionTrampoline();
1392 const void* ith_quick_imt_conflict_trampoline =
1393 ith_oat_header.GetQuickImtConflictTrampoline();
1394 const void* ith_quick_generic_jni_trampoline =
1395 ith_oat_header.GetQuickGenericJniTrampoline();
1396 const void* ith_quick_to_interpreter_bridge_trampoline =
1397 ith_oat_header.GetQuickToInterpreterBridge();
1398 const void* ith_nterp_trampoline =
1399 ith_oat_header.GetNterpTrampoline();
1400 if (ith_jni_dlsym_lookup_trampoline != jni_dlsym_lookup_trampoline_ ||
1401 ith_jni_dlsym_lookup_critical_trampoline != jni_dlsym_lookup_critical_trampoline_ ||
1402 ith_quick_resolution_trampoline != quick_resolution_trampoline_ ||
1403 ith_quick_imt_conflict_trampoline != quick_imt_conflict_trampoline_ ||
1404 ith_quick_generic_jni_trampoline != quick_generic_jni_trampoline_ ||
1405 ith_quick_to_interpreter_bridge_trampoline != quick_to_interpreter_bridge_trampoline_ ||
1406 ith_nterp_trampoline != nterp_trampoline_) {
1407 // Make sure that all methods in this image do not contain those trampolines as
1408 // entrypoints. Otherwise the class-linker won't be able to work with a single set.
1409 TrampolineCheckData data;
1410 data.error = false;
1411 data.pointer_size = GetImagePointerSize();
1412 data.quick_resolution_trampoline = ith_quick_resolution_trampoline;
1413 data.quick_imt_conflict_trampoline = ith_quick_imt_conflict_trampoline;
1414 data.quick_generic_jni_trampoline = ith_quick_generic_jni_trampoline;
1415 data.quick_to_interpreter_bridge_trampoline = ith_quick_to_interpreter_bridge_trampoline;
1416 data.nterp_trampoline = ith_nterp_trampoline;
1417 ReaderMutexLock mu(self, *Locks::heap_bitmap_lock_);
1418 auto visitor = [&](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
1419 if (obj->IsClass()) {
1420 ObjPtr<mirror::Class> klass = obj->AsClass();
1421 for (ArtMethod& m : klass->GetMethods(data.pointer_size)) {
1422 const void* entrypoint =
1423 m.GetEntryPointFromQuickCompiledCodePtrSize(data.pointer_size);
1424 if (entrypoint == data.quick_resolution_trampoline ||
1425 entrypoint == data.quick_imt_conflict_trampoline ||
1426 entrypoint == data.quick_generic_jni_trampoline ||
1427 entrypoint == data.quick_to_interpreter_bridge_trampoline) {
1428 data.m = &m;
1429 data.error = true;
1430 return;
1431 }
1432 }
1433 }
1434 };
1435 spaces[i]->GetLiveBitmap()->Walk(visitor);
1436 if (data.error) {
1437 ArtMethod* m = data.m;
1438 LOG(ERROR) << "Found a broken ArtMethod: " << ArtMethod::PrettyMethod(m);
1439 *error_msg = "Found an ArtMethod with a bad entrypoint";
1440 return false;
1441 }
1442 }
1443 }
1444 }
1445
1446 class_roots_ = GcRoot<mirror::ObjectArray<mirror::Class>>(
1447 ObjPtr<mirror::ObjectArray<mirror::Class>>::DownCast(
1448 image_header.GetImageRoot(ImageHeader::kClassRoots)));
1449 DCHECK_EQ(GetClassRoot<mirror::Class>(this)->GetClassFlags(), mirror::kClassFlagClass);
1450
1451 DCHECK_EQ(GetClassRoot<mirror::Object>(this)->GetObjectSize(), sizeof(mirror::Object));
1452 ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects =
1453 ObjPtr<mirror::ObjectArray<mirror::Object>>::DownCast(
1454 image_header.GetImageRoot(ImageHeader::kBootImageLiveObjects));
1455 runtime->SetSentinel(boot_image_live_objects->Get(ImageHeader::kClearedJniWeakSentinel));
1456 DCHECK(runtime->GetSentinel().Read()->GetClass() == GetClassRoot<mirror::Object>(this));
1457
1458 // Boot class loader, use a null handle.
1459 if (!AddImageSpaces(ArrayRef<gc::space::ImageSpace*>(spaces),
1460 ScopedNullHandle<mirror::ClassLoader>(),
1461 /*context=*/nullptr,
1462 &boot_dex_files_,
1463 error_msg)) {
1464 return false;
1465 }
1466 // We never use AOT code for debuggable runtimes.
1467 if (!runtime->IsJavaDebuggable()) {
1468 for (gc::space::ImageSpace* space : spaces) {
1469 const ImageHeader& header = space->GetImageHeader();
1470 header.VisitJniStubMethods([&](ArtMethod* method)
1471 REQUIRES_SHARED(Locks::mutator_lock_) {
1472 const void* stub = method->GetOatMethodQuickCode(image_pointer_size_);
1473 boot_image_jni_stubs_.Put(std::make_pair(JniStubKey(method), stub));
1474 return method;
1475 }, space->Begin(), image_pointer_size_);
1476 }
1477 }
1478
1479 InitializeObjectVirtualMethodHashes(GetClassRoot<mirror::Object>(this),
1480 image_pointer_size_,
1481 ArrayRef<uint32_t>(object_virtual_method_hashes_));
1482 FinishInit(self);
1483
1484 VLOG(startup) << __FUNCTION__ << " exiting";
1485 return true;
1486 }
1487
1488 void ClassLinker::AddExtraBootDexFiles(
1489 Thread* self,
1490 std::vector<std::unique_ptr<const DexFile>>&& additional_dex_files) {
1491 for (std::unique_ptr<const DexFile>& dex_file : additional_dex_files) {
1492 AppendToBootClassPath(self, dex_file.get());
1493 if (kIsDebugBuild) {
1494 for (const auto& boot_dex_file : boot_dex_files_) {
1495 DCHECK_NE(boot_dex_file->GetLocation(), dex_file->GetLocation());
1496 }
1497 }
1498 boot_dex_files_.push_back(std::move(dex_file));
1499 }
1500 }
1501
1502 bool ClassLinker::IsBootClassLoader(ObjPtr<mirror::Object> class_loader) {
1503 return class_loader == nullptr ||
1504 WellKnownClasses::java_lang_BootClassLoader == class_loader->GetClass();
1505 }
1506
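// Visitor invoked for each class of a class loader that is about to be deleted; gives
// CHA a chance to reset single-implementation info derived from the unloaded classes.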
1507 class CHAOnDeleteUpdateClassVisitor {
1508 public:
1509 explicit CHAOnDeleteUpdateClassVisitor(LinearAlloc* alloc)
1510 : allocator_(alloc), cha_(Runtime::Current()->GetClassLinker()->GetClassHierarchyAnalysis()),
1511 pointer_size_(Runtime::Current()->GetClassLinker()->GetImagePointerSize()),
1512 self_(Thread::Current()) {}
1513
1514 bool operator()(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
1515 // This class is going to be unloaded. Tell CHA about it.
1516 cha_->ResetSingleImplementationInHierarchy(klass, allocator_, pointer_size_);
1517 return true;
1518 }
1519 private:
1520 const LinearAlloc* allocator_;
1521 const ClassHierarchyAnalysis* cha_;
1522 const PointerSize pointer_size_;
1523 const Thread* self_;
1524 };
1525
1526 /*
1527 * A visitor used to ensure that all references to strings interned in an AppImage have been
1528 * properly recorded in the interned references list. It is only ever run in debug builds.
1529 */
1530 class CountInternedStringReferencesVisitor {
1531 public:
1532 CountInternedStringReferencesVisitor(const gc::space::ImageSpace& space,
1533 const InternTable::UnorderedSet& image_interns)
1534 : space_(space),
1535 image_interns_(image_interns),
1536 count_(0u) {}
1537
1538 void TestObject(ObjPtr<mirror::Object> referred_obj) const
1539 REQUIRES_SHARED(Locks::mutator_lock_) {
1540 if (referred_obj != nullptr &&
1541 space_.HasAddress(referred_obj.Ptr()) &&
1542 referred_obj->IsString()) {
1543 ObjPtr<mirror::String> referred_str = referred_obj->AsString();
1544 uint32_t hash = static_cast<uint32_t>(referred_str->GetStoredHashCode());
1545 // All image strings have the hash code calculated, even if they are not interned.
1546 DCHECK_EQ(hash, static_cast<uint32_t>(referred_str->ComputeHashCode()));
1547 auto it = image_interns_.FindWithHash(GcRoot<mirror::String>(referred_str), hash);
1548 if (it != image_interns_.end() && it->Read() == referred_str) {
1549 ++count_;
1550 }
1551 }
1552 }
1553
1554 void VisitRootIfNonNull(
1555 mirror::CompressedReference<mirror::Object>* root) const
1556 REQUIRES_SHARED(Locks::mutator_lock_) {
1557 if (!root->IsNull()) {
1558 VisitRoot(root);
1559 }
1560 }
1561
1562 void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
1563 REQUIRES_SHARED(Locks::mutator_lock_) {
1564 TestObject(root->AsMirrorPtr());
1565 }
1566
1567 // Visit class fields.
1568 void operator()(ObjPtr<mirror::Object> obj,
1569 MemberOffset offset,
1570 [[maybe_unused]] bool is_static) const REQUIRES_SHARED(Locks::mutator_lock_) {
1571 // References within image or across images don't need a read barrier.
1572 ObjPtr<mirror::Object> referred_obj =
1573 obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset);
1574 TestObject(referred_obj);
1575 }
1576
1577 void operator()([[maybe_unused]] ObjPtr<mirror::Class> klass, ObjPtr<mirror::Reference> ref) const
1578 REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
1579 operator()(ref, mirror::Reference::ReferentOffset(), /*is_static=*/ false);
1580 }
1581
1582 size_t GetCount() const {
1583 return count_;
1584 }
1585
1586 private:
1587 const gc::space::ImageSpace& space_;
1588 const InternTable::UnorderedSet& image_interns_;
1589 mutable size_t count_; // Modified from the `const` callbacks.
1590 };
1591
1592 /*
1593 * This function counts references to strings interned in the AppImage.
1594 * This is used in debug builds to check against the number of recorded references.
1595 */
1596 size_t CountInternedStringReferences(gc::space::ImageSpace& space,
1597 const InternTable::UnorderedSet& image_interns)
1598 REQUIRES_SHARED(Locks::mutator_lock_) {
1599 const gc::accounting::ContinuousSpaceBitmap* bitmap = space.GetMarkBitmap();
1600 const ImageHeader& image_header = space.GetImageHeader();
1601 const uint8_t* target_base = space.GetMemMap()->Begin();
1602 const ImageSection& objects_section = image_header.GetObjectsSection();
1603
1604 auto objects_begin = reinterpret_cast<uintptr_t>(target_base + objects_section.Offset());
1605 auto objects_end = reinterpret_cast<uintptr_t>(target_base + objects_section.End());
1606
1607 CountInternedStringReferencesVisitor visitor(space, image_interns);
1608 bitmap->VisitMarkedRange(objects_begin,
1609 objects_end,
1610 [&space, &visitor](mirror::Object* obj)
1611 REQUIRES_SHARED(Locks::mutator_lock_) {
1612 if (space.HasAddress(obj)) {
1613 if (obj->IsDexCache()) {
1614 obj->VisitReferences</* kVisitNativeRoots= */ true,
1615 kVerifyNone,
1616 kWithoutReadBarrier>(visitor, visitor);
1617 } else {
1618 // Don't visit native roots for non-dex-cache as they can't contain
1619 // native references to strings. This is verified during compilation
1620 // by ImageWriter::VerifyNativeGCRootInvariants.
1621 obj->VisitReferences</* kVisitNativeRoots= */ false,
1622 kVerifyNone,
1623 kWithoutReadBarrier>(visitor, visitor);
1624 }
1625 }
1626 });
1627 return visitor.GetCount();
1628 }
1629
1630 template <typename Visitor>
1631 static void VisitInternedStringReferences(
1632 gc::space::ImageSpace* space,
1633 const Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
1634 const uint8_t* target_base = space->Begin();
1635 const ImageSection& sro_section =
1636 space->GetImageHeader().GetImageStringReferenceOffsetsSection();
1637 const size_t num_string_offsets = sro_section.Size() / sizeof(AppImageReferenceOffsetInfo);
1638
1639 VLOG(image)
1640 << "ClassLinker:AppImage:InternStrings:imageStringReferenceOffsetCount = "
1641 << num_string_offsets;
1642
1643 const auto* sro_base =
1644 reinterpret_cast<const AppImageReferenceOffsetInfo*>(target_base + sro_section.Offset());
1645
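// Each entry is a (base_offset, member_offset) pair: `first` locates the holding
// object within the image and `second` locates the string reference inside it.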
1646 for (size_t offset_index = 0; offset_index < num_string_offsets; ++offset_index) {
1647 uint32_t base_offset = sro_base[offset_index].first;
1648
1649 uint32_t raw_member_offset = sro_base[offset_index].second;
1650 DCHECK_ALIGNED(base_offset, 2);
1651
1652 ObjPtr<mirror::Object> obj_ptr =
1653 reinterpret_cast<mirror::Object*>(space->Begin() + base_offset);
1654 if (obj_ptr->IsDexCache() && raw_member_offset >= sizeof(mirror::DexCache)) {
1655 // Special case for strings referenced from dex cache array: the offset is
1656 // actually decoded as an index into the dex cache string array.
1657 uint32_t index = raw_member_offset - sizeof(mirror::DexCache);
1658 mirror::GcRootArray<mirror::String>* array = obj_ptr->AsDexCache()->GetStringsArray();
1659 // The array could be concurrently set to null. See `StartupCompletedTask`.
1660 if (array != nullptr) {
1661 ObjPtr<mirror::String> referred_string = array->Get(index);
1662 DCHECK(referred_string != nullptr);
1663 ObjPtr<mirror::String> visited = visitor(referred_string);
1664 if (visited != referred_string) {
1665 array->Set(index, visited.Ptr());
1666 }
1667 }
1668 } else {
1669 DCHECK_ALIGNED(raw_member_offset, 2);
1670 MemberOffset member_offset(raw_member_offset);
1671 ObjPtr<mirror::String> referred_string =
1672 obj_ptr->GetFieldObject<mirror::String,
1673 kVerifyNone,
1674 kWithoutReadBarrier,
1675 /* kIsVolatile= */ false>(member_offset);
1676 DCHECK(referred_string != nullptr);
1677
1678 ObjPtr<mirror::String> visited = visitor(referred_string);
1679 if (visited != referred_string) {
1680 obj_ptr->SetFieldObject</* kTransactionActive= */ false,
1681 /* kCheckTransaction= */ false,
1682 kVerifyNone,
1683 /* kIsVolatile= */ false>(member_offset, visited);
1684 }
1685 }
1686 }
1687 }
1688
1689 static void VerifyInternedStringReferences(gc::space::ImageSpace* space)
1690 REQUIRES_SHARED(Locks::mutator_lock_) {
1691 InternTable::UnorderedSet image_interns;
1692 const ImageSection& section = space->GetImageHeader().GetInternedStringsSection();
1693 if (section.Size() > 0) {
1694 size_t read_count;
1695 const uint8_t* data = space->Begin() + section.Offset();
1696 InternTable::UnorderedSet image_set(data, /*make_copy_of_data=*/ false, &read_count);
1697 image_set.swap(image_interns);
1698 }
1699 size_t num_recorded_refs = 0u;
1700 VisitInternedStringReferences(
1701 space,
1702 [&image_interns, &num_recorded_refs](ObjPtr<mirror::String> str)
1703 REQUIRES_SHARED(Locks::mutator_lock_) {
1704 auto it = image_interns.find(GcRoot<mirror::String>(str));
1705 CHECK(it != image_interns.end());
1706 CHECK(it->Read() == str);
1707 ++num_recorded_refs;
1708 return str;
1709 });
1710 size_t num_found_refs = CountInternedStringReferences(*space, image_interns);
1711 CHECK_EQ(num_recorded_refs, num_found_refs);
1712 }
1713
1714 // new_class_set is the set of classes that were read from the class table section in the image.
1715 // If there was no class table section, it is null.
1716 // Note: using a class here to avoid having to make ClassLinker internals public.
1717 class AppImageLoadingHelper {
1718 public:
1719 static void Update(
1720 ClassLinker* class_linker,
1721 gc::space::ImageSpace* space,
1722 Handle<mirror::ClassLoader> class_loader,
1723 Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches)
1724 REQUIRES(!Locks::dex_lock_)
1725 REQUIRES_SHARED(Locks::mutator_lock_);
1726
1727 static void HandleAppImageStrings(gc::space::ImageSpace* space)
1728 REQUIRES_SHARED(Locks::mutator_lock_);
1729 };
1730
1731 void AppImageLoadingHelper::Update(
1732 ClassLinker* class_linker,
1733 gc::space::ImageSpace* space,
1734 Handle<mirror::ClassLoader> class_loader,
1735 Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches)
1736 REQUIRES(!Locks::dex_lock_)
1737 REQUIRES_SHARED(Locks::mutator_lock_) {
1738 ScopedTrace app_image_timing("AppImage:Updating");
1739
1740 if (kIsDebugBuild && ClassLinker::kAppImageMayContainStrings) {
1741 // In debug build, verify the string references before applying
1742 // the Runtime::LoadAppImageStartupCache() option.
1743 VerifyInternedStringReferences(space);
1744 }
1745 DCHECK(class_loader.Get() != nullptr);
1746 Thread* const self = Thread::Current();
1747 Runtime* const runtime = Runtime::Current();
1748 gc::Heap* const heap = runtime->GetHeap();
1749 const ImageHeader& header = space->GetImageHeader();
1750 int32_t number_of_dex_cache_arrays_cleared = 0;
1751 {
1752 // Register dex caches with the class loader.
1753 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
1754 for (auto dex_cache : dex_caches.Iterate<mirror::DexCache>()) {
1755 const DexFile* const dex_file = dex_cache->GetDexFile();
1756 {
1757 WriterMutexLock mu2(self, *Locks::dex_lock_);
1758 CHECK(class_linker->FindDexCacheDataLocked(*dex_file) == nullptr);
1759 if (runtime->GetStartupCompleted()) {
1760 number_of_dex_cache_arrays_cleared++;
1761 // Free up dex cache arrays that we would only allocate at startup.
1762 // We do this here before registering and within the lock to be
1763 // consistent with `StartupCompletedTask`.
1764 dex_cache->UnlinkStartupCaches();
1765 }
1766 VLOG(image) << "App image registers dex file " << dex_file->GetLocation();
1767 class_linker->RegisterDexFileLocked(*dex_file, dex_cache, class_loader.Get());
1768 }
1769 }
1770 }
1771 if (number_of_dex_cache_arrays_cleared == dex_caches->GetLength()) {
1772 // Free up dex cache arrays that we would only allocate at startup.
1773 // If `number_of_dex_cache_arrays_cleared` isn't the number of dex caches in
1774 // the image, then there is a race with the `StartupCompletedTask`, which
1775 // will release the space instead.
1776 space->ReleaseMetadata();
1777 }
1778
1779 if (ClassLinker::kAppImageMayContainStrings) {
1780 HandleAppImageStrings(space);
1781 }
1782
1783 if (kVerifyArtMethodDeclaringClasses) {
1784 ScopedTrace timing("AppImage:VerifyDeclaringClasses");
1785 ReaderMutexLock rmu(self, *Locks::heap_bitmap_lock_);
1786 gc::accounting::HeapBitmap* live_bitmap = heap->GetLiveBitmap();
1787 header.VisitPackedArtMethods([&](ArtMethod& method)
1788 REQUIRES_SHARED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
1789 ObjPtr<mirror::Class> klass = method.GetDeclaringClassUnchecked();
1790 if (klass != nullptr) {
1791 CHECK(live_bitmap->Test(klass.Ptr())) << "Image method has unmarked declaring class";
1792 }
1793 }, space->Begin(), kRuntimePointerSize);
1794 }
1795 }
1796
1797 void AppImageLoadingHelper::HandleAppImageStrings(gc::space::ImageSpace* space) {
1798 // Iterate over the string reference offsets stored in the image and intern
1799 // the strings they point to.
1800 ScopedTrace timing("AppImage:InternString");
1801
1802 Runtime* const runtime = Runtime::Current();
1803 InternTable* const intern_table = runtime->GetInternTable();
1804
1805 // Add the image strings to the intern table, removing any conflicts. For each conflict,
1806 // store the address of the existing (canonical) string in a map for fast remapping below.
1807 // TODO: Optimize with a bitmap or bloom filter
1808 SafeMap<mirror::String*, mirror::String*> intern_remap;
1809 auto func = [&](InternTable::UnorderedSet& interns)
1810 REQUIRES_SHARED(Locks::mutator_lock_)
1811 REQUIRES(Locks::intern_table_lock_) {
1812 const size_t non_boot_image_strings = intern_table->CountInterns(
1813 /*visit_boot_images=*/false,
1814 /*visit_non_boot_images=*/true);
1815 VLOG(image) << "AppImage:stringsInInternTableSize = " << interns.size();
1816 VLOG(image) << "AppImage:nonBootImageInternStrings = " << non_boot_image_strings;
1817 // Visit the smaller of the two sets to compute the intersection; this bounds the work
1818 // by the size of the smaller set.
1818 if (interns.size() < non_boot_image_strings) {
1819 for (auto it = interns.begin(); it != interns.end(); ) {
1820 ObjPtr<mirror::String> string = it->Read();
1821 ObjPtr<mirror::String> existing = intern_table->LookupWeakLocked(string);
1822 if (existing == nullptr) {
1823 existing = intern_table->LookupStrongLocked(string);
1824 }
1825 if (existing != nullptr) {
1826 intern_remap.Put(string.Ptr(), existing.Ptr());
1827 it = interns.erase(it);
1828 } else {
1829 ++it;
1830 }
1831 }
1832 } else {
1833 intern_table->VisitInterns([&](const GcRoot<mirror::String>& root)
1834 REQUIRES_SHARED(Locks::mutator_lock_)
1835 REQUIRES(Locks::intern_table_lock_) {
1836 auto it = interns.find(root);
1837 if (it != interns.end()) {
1838 ObjPtr<mirror::String> existing = root.Read();
1839 intern_remap.Put(it->Read(), existing.Ptr());
1840 it = interns.erase(it);
1841 }
1842 }, /*visit_boot_images=*/false, /*visit_non_boot_images=*/true);
1843 }
1844 // Consistency check to ensure correctness.
1845 if (kIsDebugBuild) {
1846 for (GcRoot<mirror::String>& root : interns) {
1847 ObjPtr<mirror::String> string = root.Read();
1848 CHECK(intern_table->LookupWeakLocked(string) == nullptr) << string->ToModifiedUtf8();
1849 CHECK(intern_table->LookupStrongLocked(string) == nullptr) << string->ToModifiedUtf8();
1850 }
1851 }
1852 };
1853 intern_table->AddImageStringsToTable(space, func);
1854 if (!intern_remap.empty()) {
1855 VLOG(image) << "AppImage:conflictingInternStrings = " << intern_remap.size();
1856 VisitInternedStringReferences(
1857 space,
1858 [&intern_remap](ObjPtr<mirror::String> str) REQUIRES_SHARED(Locks::mutator_lock_) {
1859 auto it = intern_remap.find(str.Ptr());
1860 if (it != intern_remap.end()) {
1861 return ObjPtr<mirror::String>(it->second);
1862 }
1863 return str;
1864 });
1865 }
1866 }
1867
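// Opens the dex file with the given location from within an oat file, verifying that
// the location checksum and SHA-1 recorded in the oat file match the opened dex file.
// Returns null and sets *error_msg on failure.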
1868 static std::unique_ptr<const DexFile> OpenOatDexFile(const OatFile* oat_file,
1869 const char* location,
1870 std::string* error_msg)
1871 REQUIRES_SHARED(Locks::mutator_lock_) {
1872 DCHECK(error_msg != nullptr);
1873 std::unique_ptr<const DexFile> dex_file;
1874 const OatDexFile* oat_dex_file = oat_file->GetOatDexFile(location, error_msg);
1875 if (oat_dex_file == nullptr) {
1876 return std::unique_ptr<const DexFile>();
1877 }
1878 std::string inner_error_msg;
1879 dex_file = oat_dex_file->OpenDexFile(&inner_error_msg);
1880 if (dex_file == nullptr) {
1881 *error_msg = StringPrintf("Failed to open dex file %s from within oat file %s error '%s'",
1882 location,
1883 oat_file->GetLocation().c_str(),
1884 inner_error_msg.c_str());
1885 return std::unique_ptr<const DexFile>();
1886 }
1887
1888 if (dex_file->GetLocationChecksum() != oat_dex_file->GetDexFileLocationChecksum()) {
1889 CHECK(dex_file->GetSha1() != oat_dex_file->GetSha1());
1890 *error_msg = StringPrintf("Checksums do not match for %s: %x vs %x",
1891 location,
1892 dex_file->GetLocationChecksum(),
1893 oat_dex_file->GetDexFileLocationChecksum());
1894 return std::unique_ptr<const DexFile>();
1895 }
1896 CHECK(dex_file->GetSha1() == oat_dex_file->GetSha1());
1897 return dex_file;
1898 }
1899
1900 bool ClassLinker::OpenImageDexFiles(gc::space::ImageSpace* space,
1901 std::vector<std::unique_ptr<const DexFile>>* out_dex_files,
1902 std::string* error_msg) {
1903 ScopedAssertNoThreadSuspension nts(__FUNCTION__);
1904 const ImageHeader& header = space->GetImageHeader();
1905 ObjPtr<mirror::Object> dex_caches_object = header.GetImageRoot(ImageHeader::kDexCaches);
1906 DCHECK(dex_caches_object != nullptr);
1907 ObjPtr<mirror::ObjectArray<mirror::DexCache>> dex_caches =
1908 dex_caches_object->AsObjectArray<mirror::DexCache>();
1909 const OatFile* oat_file = space->GetOatFile();
1910 for (auto dex_cache : dex_caches->Iterate()) {
1911 std::string dex_file_location(dex_cache->GetLocation()->ToModifiedUtf8());
1912 std::unique_ptr<const DexFile> dex_file = OpenOatDexFile(oat_file,
1913 dex_file_location.c_str(),
1914 error_msg);
1915 if (dex_file == nullptr) {
1916 return false;
1917 }
1918 dex_cache->SetDexFile(dex_file.get());
1919 out_dex_files->push_back(std::move(dex_file));
1920 }
1921 return true;
1922 }
1923
1924 bool ClassLinker::OpenAndInitImageDexFiles(
1925 const gc::space::ImageSpace* space,
1926 Handle<mirror::ClassLoader> class_loader,
1927 std::vector<std::unique_ptr<const DexFile>>* out_dex_files,
1928 std::string* error_msg) {
1929 DCHECK(out_dex_files != nullptr);
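// A null class loader denotes the boot class path, see IsBootClassLoader() above.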
1930 const bool app_image = class_loader != nullptr;
1931 const ImageHeader& header = space->GetImageHeader();
1932 ObjPtr<mirror::Object> dex_caches_object = header.GetImageRoot(ImageHeader::kDexCaches);
1933 DCHECK(dex_caches_object != nullptr);
1934 Thread* const self = Thread::Current();
1935 StackHandleScope<3> hs(self);
1936 Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches(
1937 hs.NewHandle(dex_caches_object->AsObjectArray<mirror::DexCache>()));
1938 const OatFile* oat_file = space->GetOatFile();
1939 if (oat_file->GetOatHeader().GetDexFileCount() !=
1940 static_cast<uint32_t>(dex_caches->GetLength())) {
1941 *error_msg =
1942 "Dex cache count and dex file count mismatch while trying to initialize from image";
1943 return false;
1944 }
1945
1946 for (auto dex_cache : dex_caches.Iterate<mirror::DexCache>()) {
1947 std::string dex_file_location = dex_cache->GetLocation()->ToModifiedUtf8();
1948 std::unique_ptr<const DexFile> dex_file =
1949 OpenOatDexFile(oat_file, dex_file_location.c_str(), error_msg);
1950 if (dex_file == nullptr) {
1951 return false;
1952 }
1953
1954 {
1955 // Native fields are all null. Initialize them.
1956 WriterMutexLock mu(self, *Locks::dex_lock_);
1957 dex_cache->Initialize(dex_file.get(), class_loader.Get());
1958 }
1959 if (!app_image) {
1960 // Register dex files, keep track of existing ones that are conflicts.
1961 AppendToBootClassPath(dex_file.get(), dex_cache);
1962 }
1963 out_dex_files->push_back(std::move(dex_file));
1964 }
1965 return true;
1966 }
1967
1968 // Helper class for ArtMethod checks when adding an image. Keeps all required functionality
1969 // together and caches some intermediate results.
1970 template <PointerSize kPointerSize>
1971 class ImageChecker final {
1972 public:
1973 static void CheckObjects(gc::Heap* heap, gc::space::ImageSpace* space)
1974 REQUIRES_SHARED(Locks::mutator_lock_) {
1975 // There can be no GC during boot image initialization, so we do not need read barriers.
1976 ScopedDebugDisallowReadBarriers sddrb(Thread::Current());
1977
1978 CHECK_EQ(kPointerSize, space->GetImageHeader().GetPointerSize());
1979 const ImageSection& objects_section = space->GetImageHeader().GetObjectsSection();
1980 uintptr_t space_begin = reinterpret_cast<uintptr_t>(space->Begin());
1981 uintptr_t objects_begin = space_begin + objects_section.Offset();
1982 uintptr_t objects_end = objects_begin + objects_section.Size();
1983 ImageChecker ic(heap);
1984 auto visitor = [&](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
1985 DCHECK(obj != nullptr);
1986 mirror::Class* obj_klass = obj->GetClass<kDefaultVerifyFlags, kWithoutReadBarrier>();
1987 CHECK(obj_klass != nullptr) << "Null class in object " << obj;
1988 mirror::Class* class_class = obj_klass->GetClass<kDefaultVerifyFlags, kWithoutReadBarrier>();
1989 CHECK(class_class != nullptr) << "Null class class " << obj;
1990 if (obj_klass == class_class) {
1991 auto klass = obj->AsClass();
1992 for (ArtField& field : klass->GetIFields()) {
1993 CHECK_EQ(field.GetDeclaringClass<kWithoutReadBarrier>(), klass);
1994 }
1995 for (ArtField& field : klass->GetSFields()) {
1996 CHECK_EQ(field.GetDeclaringClass<kWithoutReadBarrier>(), klass);
1997 }
1998 for (ArtMethod& m : klass->GetMethods(kPointerSize)) {
1999 ic.CheckArtMethod(&m, klass);
2000 }
2001 ObjPtr<mirror::PointerArray> vtable =
2002 klass->GetVTable<kDefaultVerifyFlags, kWithoutReadBarrier>();
2003 if (vtable != nullptr) {
2004 ic.CheckArtMethodPointerArray(vtable);
2005 }
2006 if (klass->ShouldHaveImt()) {
2007 ImTable* imt = klass->GetImt(kPointerSize);
2008 for (size_t i = 0; i < ImTable::kSize; ++i) {
2009 ic.CheckArtMethod(imt->Get(i, kPointerSize), /*expected_class=*/ nullptr);
2010 }
2011 }
2012 if (klass->ShouldHaveEmbeddedVTable()) {
2013 for (int32_t i = 0; i < klass->GetEmbeddedVTableLength(); ++i) {
2014 ic.CheckArtMethod(klass->GetEmbeddedVTableEntry(i, kPointerSize),
2015 /*expected_class=*/ nullptr);
2016 }
2017 }
2018 ObjPtr<mirror::IfTable> iftable =
2019 klass->GetIfTable<kDefaultVerifyFlags, kWithoutReadBarrier>();
2020 int32_t iftable_count = (iftable != nullptr) ? iftable->Count() : 0;
2021 for (int32_t i = 0; i < iftable_count; ++i) {
2022 ObjPtr<mirror::PointerArray> method_array =
2023 iftable->GetMethodArrayOrNull<kDefaultVerifyFlags, kWithoutReadBarrier>(i);
2024 if (method_array != nullptr) {
2025 ic.CheckArtMethodPointerArray(method_array);
2026 }
2027 }
2028 }
2029 };
2030 space->GetLiveBitmap()->VisitMarkedRange(objects_begin, objects_end, visitor);
2031 }
2032
2033 private:
2034 explicit ImageChecker(gc::Heap* heap) {
2035 ArrayRef<gc::space::ImageSpace* const> spaces(heap->GetBootImageSpaces());
2036 space_begin_.reserve(spaces.size());
2037 for (gc::space::ImageSpace* space : spaces) {
2038 CHECK_EQ(static_cast<const void*>(space->Begin()), &space->GetImageHeader());
2039 space_begin_.push_back(space->Begin());
2040 }
2041 }
2042
2043 void CheckArtMethod(ArtMethod* m, ObjPtr<mirror::Class> expected_class)
2044 REQUIRES_SHARED(Locks::mutator_lock_) {
2045 ObjPtr<mirror::Class> declaring_class = m->GetDeclaringClassUnchecked<kWithoutReadBarrier>();
2046 if (m->IsRuntimeMethod()) {
2047 CHECK(declaring_class == nullptr) << declaring_class << " " << m->PrettyMethod();
2048 } else if (m->IsCopied()) {
2049 CHECK(declaring_class != nullptr) << m->PrettyMethod();
2050 } else if (expected_class != nullptr) {
2051 CHECK_EQ(declaring_class, expected_class) << m->PrettyMethod();
2052 }
2053 bool contains = false;
2054 for (const uint8_t* begin : space_begin_) {
2055 const size_t offset = reinterpret_cast<uint8_t*>(m) - begin;
2056 const ImageHeader* header = reinterpret_cast<const ImageHeader*>(begin);
2057 if (header->GetMethodsSection().Contains(offset) ||
2058 header->GetRuntimeMethodsSection().Contains(offset)) {
2059 contains = true;
2060 break;
2061 }
2062 }
2063 CHECK(contains) << m << " not found";
2064 }
2065
2066 void CheckArtMethodPointerArray(ObjPtr<mirror::PointerArray> arr)
2067 REQUIRES_SHARED(Locks::mutator_lock_) {
2068 CHECK(arr != nullptr);
2069 for (int32_t j = 0; j < arr->GetLength(); ++j) {
2070 auto* method = arr->GetElementPtrSize<ArtMethod*>(j, kPointerSize);
2071 CHECK(method != nullptr);
2072 CheckArtMethod(method, /*expected_class=*/ nullptr);
2073 }
2074 }
2075
2076 std::vector<const uint8_t*> space_begin_;
2077 };
2078
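// Debug-build check that every image method's declaring class is present in the app
// class loader's class table and that direct interfaces of the table's classes resolve.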
2079 static void VerifyAppImage(const ImageHeader& header,
2080 const Handle<mirror::ClassLoader>& class_loader,
2081 ClassTable* class_table,
2082 gc::space::ImageSpace* space)
2083 REQUIRES_SHARED(Locks::mutator_lock_) {
2084 header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
2085 ObjPtr<mirror::Class> klass = method.GetDeclaringClass();
2086 if (klass != nullptr && !Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass)) {
2087 CHECK_EQ(class_table->LookupByDescriptor(klass), klass)
2088 << mirror::Class::PrettyClass(klass);
2089 }
2090 }, space->Begin(), kRuntimePointerSize);
2091 {
2092 // Verify that all direct interfaces of classes in the class table are also resolved.
2093 std::vector<ObjPtr<mirror::Class>> classes;
2094 auto verify_direct_interfaces_in_table = [&](ObjPtr<mirror::Class> klass)
2095 REQUIRES_SHARED(Locks::mutator_lock_) {
2096 if (!klass->IsPrimitive() && klass->GetClassLoader() == class_loader.Get()) {
2097 classes.push_back(klass);
2098 }
2099 return true;
2100 };
2101 class_table->Visit(verify_direct_interfaces_in_table);
2102 for (ObjPtr<mirror::Class> klass : classes) {
2103 for (uint32_t i = 0, num = klass->NumDirectInterfaces(); i != num; ++i) {
2104 CHECK(klass->GetDirectInterface(i) != nullptr)
2105 << klass->PrettyDescriptor() << " iface #" << i;
2106 }
2107 }
2108 }
2109 }
2110
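// Adds one image space: validates the header, class roots and (for app images) the
// special root against the running runtime, rewrites method entrypoints where needed,
// registers dex caches, and merges the image's class table into the loader's table.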
2111 bool ClassLinker::AddImageSpace(gc::space::ImageSpace* space,
2112 Handle<mirror::ClassLoader> class_loader,
2113 ClassLoaderContext* context,
2114 const std::vector<std::unique_ptr<const DexFile>>& dex_files,
2115 std::string* error_msg) {
2116 DCHECK(error_msg != nullptr);
2117 const uint64_t start_time = NanoTime();
2118 const bool app_image = class_loader != nullptr;
2119 const ImageHeader& header = space->GetImageHeader();
2120 ObjPtr<mirror::Object> dex_caches_object = header.GetImageRoot(ImageHeader::kDexCaches);
2121 DCHECK(dex_caches_object != nullptr);
2122 Runtime* const runtime = Runtime::Current();
2123 gc::Heap* const heap = runtime->GetHeap();
2124 Thread* const self = Thread::Current();
2125 // Check that the image is what we are expecting.
2126 if (image_pointer_size_ != space->GetImageHeader().GetPointerSize()) {
2127 *error_msg = StringPrintf("Application image pointer size does not match runtime: %zu vs %zu",
2128 static_cast<size_t>(space->GetImageHeader().GetPointerSize()),
2129 static_cast<size_t>(image_pointer_size_));
2130 return false;
2131 }
2132 size_t expected_image_roots = ImageHeader::NumberOfImageRoots(app_image);
2133 if (static_cast<size_t>(header.GetImageRoots()->GetLength()) != expected_image_roots) {
2134 *error_msg = StringPrintf("Expected %zu image roots but got %d",
2135 expected_image_roots,
2136 header.GetImageRoots()->GetLength());
2137 return false;
2138 }
2139 StackHandleScope<3> hs(self);
2140 Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches(
2141 hs.NewHandle(dex_caches_object->AsObjectArray<mirror::DexCache>()));
2142 Handle<mirror::ObjectArray<mirror::Class>> class_roots(hs.NewHandle(
2143 header.GetImageRoot(ImageHeader::kClassRoots)->AsObjectArray<mirror::Class>()));
2144 MutableHandle<mirror::Object> special_root(hs.NewHandle(
2145 app_image ? header.GetImageRoot(ImageHeader::kSpecialRoots) : nullptr));
2146 DCHECK(class_roots != nullptr);
2147 if (class_roots->GetLength() != static_cast<int32_t>(ClassRoot::kMax)) {
2148 *error_msg = StringPrintf("Expected %d class roots but got %d",
2149 class_roots->GetLength(),
2150 static_cast<int32_t>(ClassRoot::kMax));
2151 return false;
2152 }
2153 // Check against existing class roots to make sure they match the ones in the boot image.
2154 ObjPtr<mirror::ObjectArray<mirror::Class>> existing_class_roots = GetClassRoots();
2155 for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); i++) {
2156 if (class_roots->Get(i) != GetClassRoot(static_cast<ClassRoot>(i), existing_class_roots)) {
2157 *error_msg = "App image class roots must have pointer equality with runtime ones.";
2158 return false;
2159 }
2160 }
2161 const OatFile* oat_file = space->GetOatFile();
2162
2163 if (app_image) {
2164 ScopedAssertNoThreadSuspension sants("Checking app image");
2165 if (special_root == nullptr) {
2166 *error_msg = "Unexpected null special root in app image";
2167 return false;
2168 } else if (special_root->IsByteArray()) {
2169 OatHeader* oat_header = reinterpret_cast<OatHeader*>(special_root->AsByteArray()->GetData());
2170 if (!oat_header->IsValid()) {
2171 *error_msg = "Invalid oat header in special root";
2172 return false;
2173 }
2174 if (oat_file->GetVdexFile()->GetNumberOfDexFiles() != oat_header->GetDexFileCount()) {
2175 *error_msg = "Checksums count does not match";
2176 return false;
2177 }
2178 if (oat_header->IsConcurrentCopying() != gUseReadBarrier) {
2179 *error_msg = "GCs do not match";
2180 return false;
2181 }
2182
2183 // Check if the dex checksums match the dex files that we just loaded.
2184 uint32_t* checksums = reinterpret_cast<uint32_t*>(
2185 reinterpret_cast<uint8_t*>(oat_header) + oat_header->GetHeaderSize());
2186 for (uint32_t i = 0; i < oat_header->GetDexFileCount(); ++i) {
2187 uint32_t dex_checksum = dex_files.at(i)->GetHeader().checksum_;
2188 if (checksums[i] != dex_checksum) {
2189 *error_msg = StringPrintf(
2190 "Image and dex file checksums did not match for %s: image has %d, dex file has %d",
2191 dex_files.at(i)->GetLocation().c_str(),
2192 checksums[i],
2193 dex_checksum);
2194 return false;
2195 }
2196 }
2197
2198 // Validate the class loader context.
2199 const char* stored_context = oat_header->GetStoreValueByKey(OatHeader::kClassPathKey);
2200 if (stored_context == nullptr) {
2201 *error_msg = "Missing class loader context in special root";
2202 return false;
2203 }
2204 if (context->VerifyClassLoaderContextMatch(stored_context) ==
2205 ClassLoaderContext::VerificationResult::kMismatch) {
2206 *error_msg = StringPrintf("Class loader contexts don't match: %s", stored_context);
2207 return false;
2208 }
2209
2210 // Validate the apex versions.
2211 if (!gc::space::ImageSpace::ValidateApexVersions(*oat_header,
2212 runtime->GetApexVersions(),
2213 space->GetImageLocation(),
2214 error_msg)) {
2215 return false;
2216 }
2217
2218 // Validate the boot classpath.
2219 const char* bcp = oat_header->GetStoreValueByKey(OatHeader::kBootClassPathKey);
2220 if (bcp == nullptr) {
2221 *error_msg = "Missing boot classpath in special root";
2222 return false;
2223 }
2224 std::string runtime_bcp = android::base::Join(runtime->GetBootClassPathLocations(), ':');
2225 if (strcmp(bcp, runtime_bcp.c_str()) != 0) {
2226 *error_msg = StringPrintf("Mismatch boot classpath: image has %s, runtime has %s",
2227 bcp,
2228 runtime_bcp.c_str());
2229 return false;
2230 }
2231
2232 // Validate the dex checksums of the boot classpath.
2233 const char* bcp_checksums =
2234 oat_header->GetStoreValueByKey(OatHeader::kBootClassPathChecksumsKey);
2235 if (bcp_checksums == nullptr) {
2236 *error_msg = "Missing boot classpath checksums in special root";
2237 return false;
2238 }
2239 if (strcmp(bcp_checksums, runtime->GetBootClassPathChecksums().c_str()) != 0) {
2240 *error_msg = StringPrintf("Mismatch boot classpath checksums: image has %s, runtime has %s",
2241 bcp_checksums,
2242 runtime->GetBootClassPathChecksums().c_str());
2243 return false;
2244 }
2245 } else if (IsBootClassLoader(special_root.Get())) {
2246 *error_msg = "Unexpected BootClassLoader in app image";
2247 return false;
2248 } else if (!special_root->IsClassLoader()) {
2249 *error_msg = "Unexpected special root in app image";
2250 return false;
2251 }
2252 }
2253
2254 if (kCheckImageObjects) {
2255 if (!app_image) {
2256 if (image_pointer_size_ == PointerSize::k64) {
2257 ImageChecker<PointerSize::k64>::CheckObjects(heap, space);
2258 } else {
2259 ImageChecker<PointerSize::k32>::CheckObjects(heap, space);
2260 }
2261 }
2262 }
2263
2264 // Set entry point to interpreter if in InterpretOnly mode.
2265 if (!runtime->IsAotCompiler() &&
2266 (runtime->GetInstrumentation()->InterpretOnly() ||
2267 runtime->IsJavaDebuggable())) {
2268 // Set image methods' entry point to interpreter.
2269 header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
2270 if (!method.IsRuntimeMethod()) {
2271 DCHECK(method.GetDeclaringClass() != nullptr);
2272 if (!method.IsNative() && !method.IsResolutionMethod()) {
2273 method.SetEntryPointFromQuickCompiledCodePtrSize(GetQuickToInterpreterBridge(),
2274 image_pointer_size_);
2275 }
2276 }
2277 }, space->Begin(), image_pointer_size_);
2278 }
2279
2280 if (!runtime->IsAotCompiler()) {
2281 // If the boot image is not loaded by the zygote, we don't need the shared
2282 // memory optimization.
2283 // If we are profiling the boot classpath, we disable the shared memory
2284 // optimization to make sure boot classpath methods all get properly
2285 // profiled.
2286 // For debuggable runtimes we don't use AOT code, so don't use shared memory
2287 // optimization so the methods can be JITed better.
2288 //
2289 // We need to disable the flag before doing ResetCounter below, as counters
2290 // of shared memory methods always hold the "hot" value.
2291 if (!runtime->IsZygote() ||
2292 runtime->GetJITOptions()->GetProfileSaverOptions().GetProfileBootClassPath() ||
2293 runtime->IsJavaDebuggable()) {
2294 header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
2295 method.ClearMemorySharedMethod();
2296 }, space->Begin(), image_pointer_size_);
2297 }
2298
2299 ScopedTrace trace("AppImage:UpdateCodeItemAndNterp");
2300 bool can_use_nterp = interpreter::CanRuntimeUseNterp();
2301 uint16_t hotness_threshold = runtime->GetJITOptions()->GetWarmupThreshold();
2302 header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
2303 // In the image, the `data` pointer field of the ArtMethod contains the code
2304 // item offset. Change this to the actual pointer to the code item.
2305 if (method.HasCodeItem()) {
2306 const dex::CodeItem* code_item = method.GetDexFile()->GetCodeItem(
2307 reinterpret_cast32<uint32_t>(method.GetDataPtrSize(image_pointer_size_)));
2308 method.SetCodeItem(code_item, method.GetDexFile()->IsCompactDexFile());
2309 // The hotness counter may have changed since we compiled the image, so
2310 // reset it with the runtime value.
2311 method.ResetCounter(hotness_threshold);
2312 }
2313 if (method.GetEntryPointFromQuickCompiledCode() == nterp_trampoline_) {
2314 if (can_use_nterp) {
2315 // Set image methods' entry point that point to the nterp trampoline to the
2316 // nterp entry point. This allows taking the fast path when doing a
2317 // nterp->nterp call.
2318 DCHECK(!method.StillNeedsClinitCheck());
2319 method.SetEntryPointFromQuickCompiledCode(interpreter::GetNterpEntryPoint());
2320 } else {
2321 method.SetEntryPointFromQuickCompiledCode(GetQuickToInterpreterBridge());
2322 }
2323 }
2324 }, space->Begin(), image_pointer_size_);
2325 }
2326
2327 if (runtime->IsVerificationSoftFail()) {
2328 header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
2329 if (method.IsManagedAndInvokable()) {
2330 method.ClearSkipAccessChecks();
2331 }
2332 }, space->Begin(), image_pointer_size_);
2333 }
2334
2335 ClassTable* class_table = nullptr;
2336 {
2337 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
2338 class_table = InsertClassTableForClassLoader(class_loader.Get());
2339 }
2340 // If we have a class table section, read it and use it for verification in
2341 // UpdateAppImageClassLoadersAndDexCaches.
2342 ClassTable::ClassSet temp_set;
2343 const ImageSection& class_table_section = header.GetClassTableSection();
2344 const bool added_class_table = class_table_section.Size() > 0u;
2345 if (added_class_table) {
2346 const uint64_t start_time2 = NanoTime();
2347 size_t read_count = 0;
2348 temp_set = ClassTable::ClassSet(space->Begin() + class_table_section.Offset(),
2349 /*make_copy_of_data=*/ false,
2350 &read_count);
2351 VLOG(image) << "Adding class table classes took " << PrettyDuration(NanoTime() - start_time2);
2352 }
2353 if (app_image) {
2354 AppImageLoadingHelper::Update(this, space, class_loader, dex_caches);
2355
2356 {
2357 ScopedTrace trace("AppImage:UpdateClassLoaders");
2358 // Update class loader and resolved strings. If added_class_table is false, the resolved
2359 // strings were forwarded in UpdateAppImageClassLoadersAndDexCaches.
2360 ObjPtr<mirror::ClassLoader> loader(class_loader.Get());
2361 for (const ClassTable::TableSlot& root : temp_set) {
2362 // Note: We probably don't need the read barrier unless we copy the app image objects into
2363 // the region space.
2364 ObjPtr<mirror::Class> klass(root.Read());
2365 // Do not update class loader for boot image classes where the app image
2366 // class loader is only the initiating loader but not the defining loader.
2367 if (space->HasAddress(klass.Ptr())) {
2368 klass->SetClassLoader(loader);
2369 } else {
2370 DCHECK(klass->IsBootStrapClassLoaded());
2371 DCHECK(Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass.Ptr()));
2372 }
2373 }
2374 }
2375
2376 if (kBitstringSubtypeCheckEnabled) {
2377 // Every class in the app image has initially SubtypeCheckInfo in the
2378 // Uninitialized state.
2379 //
2380 // The SubtypeCheck invariants imply that a SubtypeCheckInfo is at least Initialized
2381 // after class initialization is complete. The app image ClassStatus as-is
2382 // are almost all ClassStatus::Initialized, and being in the
2383 // SubtypeCheckInfo::kUninitialized state is violating that invariant.
2384 //
2385 // Force every app image class's SubtypeCheck to be at least kInitialized.
2386 //
2387 // See also ImageWriter::FixupClass.
2388 ScopedTrace trace("AppImage:RecacluateSubtypeCheckBitstrings");
2389 MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
2390 for (const ClassTable::TableSlot& root : temp_set) {
2391 SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(root.Read());
2392 }
2393 }
2394 }
2395 if (!oat_file->GetBssGcRoots().empty()) {
2396 // Insert the oat file into the class table for visiting .bss GC roots.
2397 class_table->InsertOatFile(oat_file);
2398 }
2399
2400 if (added_class_table) {
2401 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
2402 class_table->AddClassSet(std::move(temp_set));
2403 }
2404
2405 if (kIsDebugBuild && app_image) {
2406 // This verification needs to happen after the classes have been added to the class
2407 // loader, since it ensures classes are in the class table.
2408 ScopedTrace trace("AppImage:Verify");
2409 VerifyAppImage(header, class_loader, class_table, space);
2410 }
2411
2412 VLOG(class_linker) << "Adding image space took " << PrettyDuration(NanoTime() - start_time);
2413 return true;
2414 }
2415
2416 bool ClassLinker::AddImageSpaces(ArrayRef<gc::space::ImageSpace*> spaces,
2417 Handle<mirror::ClassLoader> class_loader,
2418 ClassLoaderContext* context,
2419 /*out*/ std::vector<std::unique_ptr<const DexFile>>* dex_files,
2420 /*out*/ std::string* error_msg) {
2421 std::vector<std::vector<std::unique_ptr<const DexFile>>> dex_files_by_space_index;
2422 for (const gc::space::ImageSpace* space : spaces) {
2423 std::vector<std::unique_ptr<const DexFile>> space_dex_files;
2424 if (!OpenAndInitImageDexFiles(space, class_loader, /*out*/ &space_dex_files, error_msg)) {
2425 return false;
2426 }
2427 dex_files_by_space_index.push_back(std::move(space_dex_files));
2428 }
2429 // This must be done in a separate loop after all dex files are initialized because there can be
2430 // references from an image space to another image space that comes after it.
2431 for (size_t i = 0u, size = spaces.size(); i != size; ++i) {
2432 std::vector<std::unique_ptr<const DexFile>>& space_dex_files = dex_files_by_space_index[i];
2433 if (!AddImageSpace(spaces[i], class_loader, context, space_dex_files, error_msg)) {
2434 return false;
2435 }
2436 // Append opened dex files at the end.
2437 std::move(space_dex_files.begin(), space_dex_files.end(), std::back_inserter(*dex_files));
2438 }
2439 return true;
2440 }
2441
2442 void ClassLinker::VisitClassRoots(RootVisitor* visitor, VisitRootFlags flags) {
2443 // Acquire tracing_enabled before locking the class linker lock to prevent lock order
2444 // violation. Since enabling tracing requires the mutator lock, there are no race conditions here.
2445 const bool tracing_enabled = Trace::IsTracingEnabled();
2446 Thread* const self = Thread::Current();
2447 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
2448 if (gUseReadBarrier) {
2449 // We do not track new roots for CC.
2450 DCHECK_EQ(0, flags & (kVisitRootFlagNewRoots |
2451 kVisitRootFlagClearRootLog |
2452 kVisitRootFlagStartLoggingNewRoots |
2453 kVisitRootFlagStopLoggingNewRoots));
2454 }
2455 if ((flags & kVisitRootFlagAllRoots) != 0) {
2456 // Argument for how root visiting deals with ArtField and ArtMethod roots.
2457 // There are 3 GC cases to handle:
2458 // Non moving concurrent:
2459 // This case is easy to handle since the reference members of ArtMethod and ArtField are held
2460 // live by the class and class roots.
2461 //
2462 // Moving non-concurrent:
2463 // This case needs to call VisitNativeRoots in case the classes or dex cache arrays move.
2464 // To prevent missing roots, this case needs to ensure that there are no
2465 // suspend points between the point where we allocate ArtMethod arrays and place them in a
2466 // class which is in the class table.
2467 //
2468 // Moving concurrent:
2469 // Need to make sure to not copy ArtMethods without doing read barriers since the roots are
2470 // marked concurrently and we don't hold the classlinker_classes_lock_ when we do the copy.
2471 //
2472 // Use an unbuffered visitor since the class table uses a temporary GcRoot for holding decoded
2473 // ClassTable::TableSlot. The buffered root visiting would access a stale stack location for
2474 // these objects.
2475 UnbufferedRootVisitor root_visitor(visitor, RootInfo(kRootStickyClass));
2476 boot_class_table_->VisitRoots(root_visitor);
2477 // If tracing is enabled, then mark all the class loaders to prevent unloading.
2478 if ((flags & kVisitRootFlagClassLoader) != 0 || tracing_enabled) {
2479 for (const ClassLoaderData& data : class_loaders_) {
2480 GcRoot<mirror::Object> root(GcRoot<mirror::Object>(self->DecodeJObject(data.weak_root)));
2481 root.VisitRootIfNonNull(visitor, RootInfo(kRootVMInternal));
2482 }
2483 }
2484 } else if (!gUseReadBarrier && (flags & kVisitRootFlagNewRoots) != 0) {
2485 for (auto& root : new_roots_) {
2486 ObjPtr<mirror::Object> old_ref = root.Read<kWithoutReadBarrier>();
2487 root.VisitRoot(visitor, RootInfo(kRootStickyClass));
2488 ObjPtr<mirror::Object> new_ref = root.Read<kWithoutReadBarrier>();
2489 // Concurrent moving GC marked new roots through the to-space invariant.
2490 DCHECK_EQ(new_ref, old_ref);
2491 }
2492 for (const OatFile* oat_file : new_bss_roots_boot_oat_files_) {
2493 for (GcRoot<mirror::Object>& root : oat_file->GetBssGcRoots()) {
2494 ObjPtr<mirror::Object> old_ref = root.Read<kWithoutReadBarrier>();
2495 if (old_ref != nullptr) {
2496 DCHECK(old_ref->IsClass() || old_ref->IsString());
2497 root.VisitRoot(visitor, RootInfo(kRootStickyClass));
2498 ObjPtr<mirror::Object> new_ref = root.Read<kWithoutReadBarrier>();
2499 // Concurrent moving GC marked new roots through the to-space invariant.
2500 DCHECK_EQ(new_ref, old_ref);
2501 }
2502 }
2503 }
2504 }
2505 if (!gUseReadBarrier && (flags & kVisitRootFlagClearRootLog) != 0) {
2506 new_roots_.clear();
2507 new_bss_roots_boot_oat_files_.clear();
2508 }
2509 if (!gUseReadBarrier && (flags & kVisitRootFlagStartLoggingNewRoots) != 0) {
2510 log_new_roots_ = true;
2511 } else if (!gUseReadBarrier && (flags & kVisitRootFlagStopLoggingNewRoots) != 0) {
2512 log_new_roots_ = false;
2513 }
2514 // We deliberately ignore the class roots in the image since we
2515 // handle image roots by using the MS/CMS rescanning of dirty cards.
2516 }
2517
2518 // Keep in sync with InitCallback. Anything we visit needs to have its
2519 // references reinitialized when a ClassLinker is reinitialized from a
2520 // mapped image.
2521 void ClassLinker::VisitRoots(RootVisitor* visitor, VisitRootFlags flags, bool visit_class_roots) {
2522 class_roots_.VisitRootIfNonNull(visitor, RootInfo(kRootVMInternal));
2523 if (visit_class_roots) {
2524 VisitClassRoots(visitor, flags);
2525 }
2526 // Instead of visiting the find_array_class_cache_, drop it so that it doesn't prevent class
2527 // unloading if we are marking roots.
2528 DropFindArrayClassCache();
2529 }
2530
2531 class VisitClassLoaderClassesVisitor : public ClassLoaderVisitor {
2532 public:
2533 explicit VisitClassLoaderClassesVisitor(ClassVisitor* visitor)
2534 : visitor_(visitor),
2535 done_(false) {}
2536
2537 void Visit(ObjPtr<mirror::ClassLoader> class_loader)
2538 REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
2539 ClassTable* const class_table = class_loader->GetClassTable();
2540 if (!done_ && class_table != nullptr) {
2541 DefiningClassLoaderFilterVisitor visitor(class_loader, visitor_);
2542 if (!class_table->Visit(visitor)) {
2543 // If the ClassTable visitor returns false it means that we don't need to continue.
2544 done_ = true;
2545 }
2546 }
2547 }
2548
2549 private:
2550 // Class visitor that limits the class visits from a ClassTable to the classes with
2551 // the provided defining class loader. This filter is used to avoid multiple visits
2552 // of the same class which can be recorded for multiple initiating class loaders.
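// For example (hypothetical loaders): if a class C defined by parent loader P was also
// resolved through child loader L, then L's ClassTable can record C with L as an initiating
// loader. This filter makes the visit on behalf of L skip C, so that C is visited exactly
// once, under its defining loader P.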
2553 class DefiningClassLoaderFilterVisitor : public ClassVisitor {
2554 public:
2555 DefiningClassLoaderFilterVisitor(ObjPtr<mirror::ClassLoader> defining_class_loader,
2556 ClassVisitor* visitor)
2557 : defining_class_loader_(defining_class_loader), visitor_(visitor) { }
2558
2559 bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES_SHARED(Locks::mutator_lock_) {
2560 if (klass->GetClassLoader() != defining_class_loader_) {
2561 return true;
2562 }
2563 return (*visitor_)(klass);
2564 }
2565
2566 const ObjPtr<mirror::ClassLoader> defining_class_loader_;
2567 ClassVisitor* const visitor_;
2568 };
2569
2570 ClassVisitor* const visitor_;
2571 // If done is true then we don't need to do any more visiting.
2572 bool done_;
2573 };
2574
2575 void ClassLinker::VisitClassesInternal(ClassVisitor* visitor) {
2576 if (boot_class_table_->Visit(*visitor)) {
2577 VisitClassLoaderClassesVisitor loader_visitor(visitor);
2578 VisitClassLoaders(&loader_visitor);
2579 }
2580 }
2581
2582 void ClassLinker::VisitClasses(ClassVisitor* visitor) {
2583 Thread* const self = Thread::Current();
2584 ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
2585 // Not safe to have thread suspension when we are holding a lock.
2586 if (self != nullptr) {
2587 ScopedAssertNoThreadSuspension nts(__FUNCTION__);
2588 VisitClassesInternal(visitor);
2589 } else {
2590 VisitClassesInternal(visitor);
2591 }
2592 }
2593
2594 class GetClassesInToVector : public ClassVisitor {
2595 public:
2596 bool operator()(ObjPtr<mirror::Class> klass) override {
2597 classes_.push_back(klass);
2598 return true;
2599 }
2600 std::vector<ObjPtr<mirror::Class>> classes_;
2601 };
2602
2603 class GetClassInToObjectArray : public ClassVisitor {
2604 public:
2605 explicit GetClassInToObjectArray(mirror::ObjectArray<mirror::Class>* arr)
2606 : arr_(arr), index_(0) {}
2607
2608 bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES_SHARED(Locks::mutator_lock_) {
2609 ++index_;
2610 if (index_ <= arr_->GetLength()) {
2611 arr_->Set(index_ - 1, klass);
2612 return true;
2613 }
2614 return false;
2615 }
2616
2617 bool Succeeded() const REQUIRES_SHARED(Locks::mutator_lock_) {
2618 return index_ <= arr_->GetLength();
2619 }
2620
2621 private:
2622 mirror::ObjectArray<mirror::Class>* const arr_;
2623 int32_t index_;
2624 };
2625
2626 void ClassLinker::VisitClassesWithoutClassesLock(ClassVisitor* visitor) {
2627 // TODO: it may be possible to avoid secondary storage if we iterate over dex caches. The problem
2628 // is avoiding duplicates.
2629 if (!kMovingClasses) {
2630 ScopedAssertNoThreadSuspension nts(__FUNCTION__);
2631 GetClassesInToVector accumulator;
2632 VisitClasses(&accumulator);
2633 for (ObjPtr<mirror::Class> klass : accumulator.classes_) {
2634 if (!visitor->operator()(klass)) {
2635 return;
2636 }
2637 }
2638 } else {
2639 Thread* const self = Thread::Current();
2640 StackHandleScope<1> hs(self);
2641 auto classes = hs.NewHandle<mirror::ObjectArray<mirror::Class>>(nullptr);
2642 // We size the array assuming classes won't be added to the class table during the visit.
2643 // If this assumption fails we iterate again.
2644 while (true) {
2645 size_t class_table_size;
2646 {
2647 ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
2648 // Add 100 in case new classes get loaded when we are filling in the object array.
2649 class_table_size = NumZygoteClasses() + NumNonZygoteClasses() + 100;
2650 }
2651 ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
2652 classes.Assign(
2653 mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, class_table_size));
2654 CHECK(classes != nullptr); // OOME.
2655 GetClassInToObjectArray accumulator(classes.Get());
2656 VisitClasses(&accumulator);
2657 if (accumulator.Succeeded()) {
2658 break;
2659 }
2660 }
2661 for (int32_t i = 0; i < classes->GetLength(); ++i) {
2662 // If the class table shrank during creation of the classes array we expect null elements. If
2663 // the class table grew then the loop repeats. If classes are created after the loop has
2664 // finished then we don't visit.
2665 ObjPtr<mirror::Class> klass = classes->Get(i);
2666 if (klass != nullptr && !visitor->operator()(klass)) {
2667 return;
2668 }
2669 }
2670 }
2671 }
2672
2673 ClassLinker::~ClassLinker() {
2674 Thread* const self = Thread::Current();
2675 for (const ClassLoaderData& data : class_loaders_) {
2676 // CHA unloading analysis is not needed. No negative consequences are expected because
2677 // all the classloaders are deleted at the same time.
2678 PrepareToDeleteClassLoader(self, data, /*cleanup_cha=*/false);
2679 }
2680 for (const ClassLoaderData& data : class_loaders_) {
2681 delete data.allocator;
2682 delete data.class_table;
2683 }
2684 class_loaders_.clear();
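// Drain the pending visibly-initialized callbacks below. Wrapping each intrusive-list node in
// a unique_ptr means that popping it off the list also frees it.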
2685 while (!running_visibly_initialized_callbacks_.empty()) {
2686 std::unique_ptr<VisiblyInitializedCallback> callback(
2687 std::addressof(running_visibly_initialized_callbacks_.front()));
2688 running_visibly_initialized_callbacks_.pop_front();
2689 }
2690 }
2691
2692 void ClassLinker::PrepareToDeleteClassLoader(Thread* self,
2693 const ClassLoaderData& data,
2694 bool cleanup_cha) {
2695 Runtime* const runtime = Runtime::Current();
2696 JavaVMExt* const vm = runtime->GetJavaVM();
2697 vm->DeleteWeakGlobalRef(self, data.weak_root);
2698 // Notify the JIT that we need to remove the methods and/or profiling info.
2699 if (runtime->GetJit() != nullptr) {
2700 jit::JitCodeCache* code_cache = runtime->GetJit()->GetCodeCache();
2701 if (code_cache != nullptr) {
2702 // For the JIT case, RemoveMethodsIn removes the CHA dependencies.
2703 code_cache->RemoveMethodsIn(self, *data.allocator);
2704 }
2705 } else if (cha_ != nullptr) {
2706 // If we don't have a JIT, we need to manually remove the CHA dependencies.
2707 cha_->RemoveDependenciesForLinearAlloc(self, data.allocator);
2708 }
2709 // Cleanup references to single implementation ArtMethods that will be deleted.
2710 if (cleanup_cha) {
2711 CHAOnDeleteUpdateClassVisitor visitor(data.allocator);
2712 data.class_table->Visit<kWithoutReadBarrier>(visitor);
2713 }
2714 {
2715 MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
2716 auto end = critical_native_code_with_clinit_check_.end();
2717 for (auto it = critical_native_code_with_clinit_check_.begin(); it != end; ) {
2718 if (data.allocator->ContainsUnsafe(it->first)) {
2719 it = critical_native_code_with_clinit_check_.erase(it);
2720 } else {
2721 ++it;
2722 }
2723 }
2724 }
2725 }
2726
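// A "pointer array" is backed by a LongArray on 64-bit images and an IntArray on 32-bit
// images, so each element is wide enough to hold a native pointer such as an ArtMethod*.
// For example, AllocPointerArray(self, 4) allocates storage for 4 such pointers.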
2727 ObjPtr<mirror::PointerArray> ClassLinker::AllocPointerArray(Thread* self, size_t length) {
2728 return ObjPtr<mirror::PointerArray>::DownCast(
2729 image_pointer_size_ == PointerSize::k64
2730 ? ObjPtr<mirror::Array>(mirror::LongArray::Alloc(self, length))
2731 : ObjPtr<mirror::Array>(mirror::IntArray::Alloc(self, length)));
2732 }
2733
2734 ObjPtr<mirror::DexCache> ClassLinker::AllocDexCache(Thread* self, const DexFile& dex_file) {
2735 StackHandleScope<1> hs(self);
2736 auto dex_cache(hs.NewHandle(ObjPtr<mirror::DexCache>::DownCast(
2737 GetClassRoot<mirror::DexCache>(this)->AllocObject(self))));
2738 if (dex_cache == nullptr) {
2739 self->AssertPendingOOMException();
2740 return nullptr;
2741 }
2742 // Use InternWeak() so that the location String can be collected when the ClassLoader
2743 // with this DexCache is collected.
2744 ObjPtr<mirror::String> location = intern_table_->InternWeak(dex_file.GetLocation().c_str());
2745 if (location == nullptr) {
2746 self->AssertPendingOOMException();
2747 return nullptr;
2748 }
2749 dex_cache->SetLocation(location);
2750 return dex_cache.Get();
2751 }
2752
2753 ObjPtr<mirror::DexCache> ClassLinker::AllocAndInitializeDexCache(
2754 Thread* self, const DexFile& dex_file, ObjPtr<mirror::ClassLoader> class_loader) {
2755 StackHandleScope<1> hs(self);
2756 Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(class_loader));
2757 ObjPtr<mirror::DexCache> dex_cache = AllocDexCache(self, dex_file);
2758 if (dex_cache != nullptr) {
2759 WriterMutexLock mu(self, *Locks::dex_lock_);
2760 dex_cache->Initialize(&dex_file, h_class_loader.Get());
2761 }
2762 return dex_cache;
2763 }
2764
2765 template <bool kMovable, typename PreFenceVisitor>
2766 ObjPtr<mirror::Class> ClassLinker::AllocClass(Thread* self,
2767 ObjPtr<mirror::Class> java_lang_Class,
2768 uint32_t class_size,
2769 const PreFenceVisitor& pre_fence_visitor) {
2770 DCHECK_GE(class_size, sizeof(mirror::Class));
2771 gc::Heap* heap = Runtime::Current()->GetHeap();
2772 ObjPtr<mirror::Object> k = (kMovingClasses && kMovable) ?
2773 heap->AllocObject(self, java_lang_Class, class_size, pre_fence_visitor) :
2774 heap->AllocNonMovableObject(self, java_lang_Class, class_size, pre_fence_visitor);
2775 if (UNLIKELY(k == nullptr)) {
2776 self->AssertPendingOOMException();
2777 return nullptr;
2778 }
2779 return k->AsClass();
2780 }
2781
2782 template <bool kMovable>
2783 ObjPtr<mirror::Class> ClassLinker::AllocClass(Thread* self,
2784 ObjPtr<mirror::Class> java_lang_Class,
2785 uint32_t class_size) {
2786 mirror::Class::InitializeClassVisitor visitor(class_size);
2787 return AllocClass<kMovable>(self, java_lang_Class, class_size, visitor);
2788 }
2789
2790 ObjPtr<mirror::Class> ClassLinker::AllocClass(Thread* self, uint32_t class_size) {
2791 return AllocClass(self, GetClassRoot<mirror::Class>(this), class_size);
2792 }
2793
2794 void ClassLinker::AllocPrimitiveArrayClass(Thread* self,
2795 ClassRoot primitive_root,
2796 ClassRoot array_root) {
2797 // We make this class non-movable for the unlikely case where it were to be
2798 // moved by a sticky-bit (minor) collection when using the Generational
2799 // Concurrent Copying (CC) collector, potentially creating a stale reference
2800 // in the `klass_` field of one of its instances allocated in the Large-Object
2801 // Space (LOS) -- see the comment about the dirty card scanning logic in
2802 // art::gc::collector::ConcurrentCopying::MarkingPhase.
2803 ObjPtr<mirror::Class> array_class = AllocClass</* kMovable= */ false>(
2804 self, GetClassRoot<mirror::Class>(this), mirror::Array::ClassSize(image_pointer_size_));
2805 ObjPtr<mirror::Class> component_type = GetClassRoot(primitive_root, this);
2806 DCHECK(component_type->IsPrimitive());
2807 array_class->SetComponentType(component_type);
2808 SetClassRoot(array_root, array_class);
2809 }
2810
2811 void ClassLinker::FinishArrayClassSetup(ObjPtr<mirror::Class> array_class) {
2812 ObjPtr<mirror::Class> java_lang_Object = GetClassRoot<mirror::Object>(this);
2813 array_class->SetSuperClass(java_lang_Object);
2814 array_class->SetVTable(java_lang_Object->GetVTable());
2815 array_class->SetPrimitiveType(Primitive::kPrimNot);
2816 ObjPtr<mirror::Class> component_type = array_class->GetComponentType();
2817 array_class->SetClassFlags(component_type->IsPrimitive()
2818 ? mirror::kClassFlagNoReferenceFields
2819 : mirror::kClassFlagObjectArray);
2820 array_class->SetClassLoader(component_type->GetClassLoader());
2821 array_class->SetStatusForPrimitiveOrArray(ClassStatus::kLoaded);
2822 array_class->PopulateEmbeddedVTable(image_pointer_size_);
2823 ImTable* object_imt = java_lang_Object->GetImt(image_pointer_size_);
2824 array_class->SetImt(object_imt, image_pointer_size_);
2825 DCHECK_EQ(array_class->NumMethods(), 0u);
2826
2827 // We don't need to call new_class->SetObjectSize(..)
2828 // because Object::SizeOf delegates to Array::SizeOf.
2829
2830 // All arrays have java/lang/Cloneable and java/io/Serializable as
2831 // interfaces. We need to set that up here, so that stuff like
2832 // "instanceof" works right.
2833
2834 // Use the single, global copies of "interfaces" and "iftable"
2835 // (remember not to free them for arrays).
2836 {
2837 ObjPtr<mirror::IfTable> array_iftable = GetArrayIfTable();
2838 CHECK(array_iftable != nullptr);
2839 array_class->SetIfTable(array_iftable);
2840 }
2841
2842 // Inherit access flags from the component type.
2843 int access_flags = component_type->GetAccessFlags();
2844 // Lose any implementation detail flags; in particular, arrays aren't finalizable.
2845 access_flags &= kAccJavaFlagsMask;
2846 // Arrays can't be used as a superclass or interface, so we want to add "abstract final"
2847 // and remove "interface".
2848 access_flags |= kAccAbstract | kAccFinal;
2849 access_flags &= ~kAccInterface;
2850
2851 array_class->SetAccessFlagsDuringLinking(access_flags);
2852
2853 // Array classes are fully initialized either during single threaded startup,
2854 // or from a pre-fence visitor, so visibly initialized.
2855 array_class->SetStatusForPrimitiveOrArray(ClassStatus::kVisiblyInitialized);
2856 }
2857
2858 void ClassLinker::FinishCoreArrayClassSetup(ClassRoot array_root) {
2859 // Do not hold a lock on the array class object; the initialization of
2860 // core array classes is done while the process is still single threaded.
2861 ObjPtr<mirror::Class> array_class = GetClassRoot(array_root, this);
2862 FinishArrayClassSetup(array_class);
2863
2864 std::string temp;
2865 const char* descriptor = array_class->GetDescriptor(&temp);
2866 size_t hash = ComputeModifiedUtf8Hash(descriptor);
2867 ObjPtr<mirror::Class> existing = InsertClass(descriptor, array_class, hash);
2868 CHECK(existing == nullptr);
2869 }
2870
2871 ObjPtr<mirror::ObjectArray<mirror::StackTraceElement>> ClassLinker::AllocStackTraceElementArray(
2872 Thread* self,
2873 size_t length) {
2874 return mirror::ObjectArray<mirror::StackTraceElement>::Alloc(
2875 self, GetClassRoot<mirror::ObjectArray<mirror::StackTraceElement>>(this), length);
2876 }
2877
2878 ObjPtr<mirror::Class> ClassLinker::EnsureResolved(Thread* self,
2879 const char* descriptor,
2880 ObjPtr<mirror::Class> klass) {
2881 DCHECK(klass != nullptr);
2882 if (kIsDebugBuild) {
2883 StackHandleScope<1> hs(self);
2884 HandleWrapperObjPtr<mirror::Class> h = hs.NewHandleWrapper(&klass);
2885 Thread::PoisonObjectPointersIfDebug();
2886 }
2887
2888 // For temporary classes we must wait for them to be retired.
2889 if (init_done_ && klass->IsTemp()) {
2890 CHECK(!klass->IsResolved());
2891 if (klass->IsErroneousUnresolved()) {
2892 ThrowEarlierClassFailure(klass);
2893 return nullptr;
2894 }
2895 StackHandleScope<1> hs(self);
2896 Handle<mirror::Class> h_class(hs.NewHandle(klass));
2897 ObjectLock<mirror::Class> lock(self, h_class);
2898 // Loop and wait for the resolving thread to retire this class.
2899 while (!h_class->IsRetired() && !h_class->IsErroneousUnresolved()) {
2900 lock.WaitIgnoringInterrupts();
2901 }
2902 if (h_class->IsErroneousUnresolved()) {
2903 ThrowEarlierClassFailure(h_class.Get());
2904 return nullptr;
2905 }
2906 CHECK(h_class->IsRetired());
2907 // Get the updated class from class table.
2908 klass = LookupClass(self, descriptor, h_class.Get()->GetClassLoader());
2909 }
2910
2911 // Wait for the class if it has not already been linked.
2912 size_t index = 0;
2913 // Maximum number of yield iterations until we start sleeping.
2914 static const size_t kNumYieldIterations = 1000;
2915 // How long each sleep is in us.
2916 static const size_t kSleepDurationUS = 1000; // 1 ms.
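// The loop below polls with a two-phase backoff: up to kNumYieldIterations calls to
// sched_yield(), then kSleepDurationUS sleeps between checks, while the handle wrapper keeps
// `klass` valid across suspension.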
2917 while (!klass->IsResolved() && !klass->IsErroneousUnresolved()) {
2918 StackHandleScope<1> hs(self);
2919 HandleWrapperObjPtr<mirror::Class> h_class(hs.NewHandleWrapper(&klass));
2920 {
2921 ObjectTryLock<mirror::Class> lock(self, h_class);
2922 // Cannot use a monitor wait here since it may block when returning and deadlock if another
2923 // thread has locked klass.
2924 if (lock.Acquired()) {
2925 // Check for circular dependencies between classes, the lock is required for SetStatus.
2926 if (!h_class->IsResolved() && h_class->GetClinitThreadId() == self->GetTid()) {
2927 ThrowClassCircularityError(h_class.Get());
2928 mirror::Class::SetStatus(h_class, ClassStatus::kErrorUnresolved, self);
2929 return nullptr;
2930 }
2931 }
2932 }
2933 {
2934 // Handle wrapper deals with klass moving.
2935 ScopedThreadSuspension sts(self, ThreadState::kSuspended);
2936 if (index < kNumYieldIterations) {
2937 sched_yield();
2938 } else {
2939 usleep(kSleepDurationUS);
2940 }
2941 }
2942 ++index;
2943 }
2944
2945 if (klass->IsErroneousUnresolved()) {
2946 ThrowEarlierClassFailure(klass);
2947 return nullptr;
2948 }
2949 // Return the loaded class. No exceptions should be pending.
2950 CHECK(klass->IsResolved()) << klass->PrettyClass();
2951 self->AssertNoPendingException();
2952 return klass;
2953 }
2954
2955 using ClassPathEntry = std::pair<const DexFile*, const dex::ClassDef*>;
2956
2957 // Search a collection of DexFiles for a descriptor
2958 ClassPathEntry FindInClassPath(const char* descriptor,
2959 size_t hash, const std::vector<const DexFile*>& class_path) {
2960 for (const DexFile* dex_file : class_path) {
2961 DCHECK(dex_file != nullptr);
2962 const dex::ClassDef* dex_class_def = OatDexFile::FindClassDef(*dex_file, descriptor, hash);
2963 if (dex_class_def != nullptr) {
2964 return ClassPathEntry(dex_file, dex_class_def);
2965 }
2966 }
2967 return ClassPathEntry(nullptr, nullptr);
2968 }
2969
2970 // Helper macro to make sure each class loader lookup call handles the case where the
2971 // class loader is not recognized, or the lookup threw an exception.
2972 #define RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(call_, result_, thread_) \
2973 do { \
2974 auto local_call = call_; \
2975 if (!local_call) { \
2976 return false; \
2977 } \
2978 auto local_result = result_; \
2979 if (local_result != nullptr) { \
2980 return true; \
2981 } \
2982 auto local_thread = thread_; \
2983 if (local_thread->IsExceptionPending()) { \
2984 /* Pending exception means there was an error other than */ \
2985 /* ClassNotFound that must be returned to the caller. */ \
2986 return false; \
2987 } \
2988 } while (0)
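// For example, a step like
//   RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
//       FindClassInSharedLibraries(self, descriptor, hash, class_loader, result), *result, self);
// returns false if the lookup reported an unrecognized class loader, returns true if *result
// was filled in, returns false if the lookup left an exception pending, and otherwise falls
// through to the next lookup step.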
2989
2990 bool ClassLinker::FindClassInSharedLibraries(Thread* self,
2991 const char* descriptor,
2992 size_t hash,
2993 Handle<mirror::ClassLoader> class_loader,
2994 /*out*/ ObjPtr<mirror::Class>* result) {
2995 ArtField* field = WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoaders;
2996 return FindClassInSharedLibrariesHelper(self, descriptor, hash, class_loader, field, result);
2997 }
2998
2999 bool ClassLinker::FindClassInSharedLibrariesHelper(Thread* self,
3000 const char* descriptor,
3001 size_t hash,
3002 Handle<mirror::ClassLoader> class_loader,
3003 ArtField* field,
3004 /*out*/ ObjPtr<mirror::Class>* result) {
3005 ObjPtr<mirror::Object> raw_shared_libraries = field->GetObject(class_loader.Get());
3006 if (raw_shared_libraries == nullptr) {
3007 return true;
3008 }
3009
3010 StackHandleScope<2> hs(self);
3011 Handle<mirror::ObjectArray<mirror::ClassLoader>> shared_libraries(
3012 hs.NewHandle(raw_shared_libraries->AsObjectArray<mirror::ClassLoader>()));
3013 MutableHandle<mirror::ClassLoader> temp_loader = hs.NewHandle<mirror::ClassLoader>(nullptr);
3014 for (auto loader : shared_libraries.Iterate<mirror::ClassLoader>()) {
3015 temp_loader.Assign(loader);
3016 RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
3017 FindClassInBaseDexClassLoader(self, descriptor, hash, temp_loader, result),
3018 *result,
3019 self);
3020 }
3021 return true;
3022 }
3023
3024 bool ClassLinker::FindClassInSharedLibrariesAfter(Thread* self,
3025 const char* descriptor,
3026 size_t hash,
3027 Handle<mirror::ClassLoader> class_loader,
3028 /*out*/ ObjPtr<mirror::Class>* result) {
3029 ArtField* field = WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoadersAfter;
3030 return FindClassInSharedLibrariesHelper(self, descriptor, hash, class_loader, field, result);
3031 }
3032
3033 bool ClassLinker::FindClassInBaseDexClassLoader(Thread* self,
3034 const char* descriptor,
3035 size_t hash,
3036 Handle<mirror::ClassLoader> class_loader,
3037 /*out*/ ObjPtr<mirror::Class>* result) {
3038 // Termination case: boot class loader.
3039 if (IsBootClassLoader(class_loader.Get())) {
3040 RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
3041 FindClassInBootClassLoaderClassPath(self, descriptor, hash, result), *result, self);
3042 return true;
3043 }
3044
3045 if (IsPathOrDexClassLoader(class_loader) || IsInMemoryDexClassLoader(class_loader)) {
3046 // For regular path or dex class loader the search order is:
3047 // - parent
3048 // - shared libraries
3049 // - class loader dex files
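// For example (hypothetical app setup): resolving "Lcom/example/Foo;" through a
// PathClassLoader whose parent is the boot class loader first recurses into the parent (the
// boot class path), then consults any shared library loaders, and only then searches the
// loader's own dex files.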
3050
3051 // Create a handle as RegisterDexFile may allocate dex caches (and cause thread suspension).
3052 StackHandleScope<1> hs(self);
3053 Handle<mirror::ClassLoader> h_parent(hs.NewHandle(class_loader->GetParent()));
3054 RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
3055 FindClassInBaseDexClassLoader(self, descriptor, hash, h_parent, result),
3056 *result,
3057 self);
3058 RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
3059 FindClassInSharedLibraries(self, descriptor, hash, class_loader, result),
3060 *result,
3061 self);
3062 RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
3063 FindClassInBaseDexClassLoaderClassPath(self, descriptor, hash, class_loader, result),
3064 *result,
3065 self);
3066 RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
3067 FindClassInSharedLibrariesAfter(self, descriptor, hash, class_loader, result),
3068 *result,
3069 self);
3070 // We did not find a class, but the class loader chain was recognized, so we
3071 // return true.
3072 return true;
3073 }
3074
3075 if (IsDelegateLastClassLoader(class_loader)) {
3076 // For delegate last, the search order is:
3077 // - boot class path
3078 // - shared libraries
3079 // - class loader dex files
3080 // - parent
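// In contrast to the path class loader case above, a delegate last class loader consults its
// own dex files before its parent, so an app-provided class can shadow one from the parent
// but can never shadow a class on the boot class path, which is searched first.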
3081 RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
3082 FindClassInBootClassLoaderClassPath(self, descriptor, hash, result), *result, self);
3083 RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
3084 FindClassInSharedLibraries(self, descriptor, hash, class_loader, result),
3085 *result,
3086 self);
3087 RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
3088 FindClassInBaseDexClassLoaderClassPath(self, descriptor, hash, class_loader, result),
3089 *result,
3090 self);
3091 RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
3092 FindClassInSharedLibrariesAfter(self, descriptor, hash, class_loader, result),
3093 *result,
3094 self);
3095
3096 // Create a handle as RegisterDexFile may allocate dex caches (and cause thread suspension).
3097 StackHandleScope<1> hs(self);
3098 Handle<mirror::ClassLoader> h_parent(hs.NewHandle(class_loader->GetParent()));
3099 RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
3100 FindClassInBaseDexClassLoader(self, descriptor, hash, h_parent, result),
3101 *result,
3102 self);
3103 // We did not find a class, but the class loader chain was recognized, so we
3104 // return true.
3105 return true;
3106 }
3107
3108 // Unsupported class loader.
3109 *result = nullptr;
3110 return false;
3111 }
3112
3113 #undef RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION
3114
3115 namespace {
3116
3117 // Matches exceptions caught in DexFile.defineClass.
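// Only ClassNotFoundException and the pre-allocated NoClassDefFoundError raised while probing
// one element of a class loader chain are treated as "caught"; anything else (e.g. an
// OutOfMemoryError) is left pending so that it propagates to the caller.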
3118 ALWAYS_INLINE bool MatchesDexFileCaughtExceptions(ObjPtr<mirror::Throwable> throwable,
3119 ClassLinker* class_linker)
3120 REQUIRES_SHARED(Locks::mutator_lock_) {
3121 return
3122 // ClassNotFoundException.
3123 throwable->InstanceOf(GetClassRoot(ClassRoot::kJavaLangClassNotFoundException,
3124 class_linker))
3125 ||
3126 // NoClassDefFoundError. TODO: Reconsider this. b/130746382.
3127 throwable->InstanceOf(Runtime::Current()->GetPreAllocatedNoClassDefFoundError()->GetClass());
3128 }
3129
3130 // Clear exceptions caught in DexFile.defineClass.
3131 ALWAYS_INLINE void FilterDexFileCaughtExceptions(Thread* self, ClassLinker* class_linker)
3132 REQUIRES_SHARED(Locks::mutator_lock_) {
3133 if (MatchesDexFileCaughtExceptions(self->GetException(), class_linker)) {
3134 self->ClearException();
3135 }
3136 }
3137
3138 } // namespace
3139
3140 // Finds the class in the boot class loader.
3141 // If the class is found, the resolved class is stored in *result; otherwise *result is unchanged.
3142 bool ClassLinker::FindClassInBootClassLoaderClassPath(Thread* self,
3143 const char* descriptor,
3144 size_t hash,
3145 /*out*/ ObjPtr<mirror::Class>* result) {
3146 ClassPathEntry pair = FindInClassPath(descriptor, hash, boot_class_path_);
3147 if (pair.second != nullptr) {
3148 ObjPtr<mirror::Class> klass = LookupClass(self, descriptor, hash, nullptr);
3149 if (klass != nullptr) {
3150 *result = EnsureResolved(self, descriptor, klass);
3151 } else {
3152 *result = DefineClass(self,
3153 descriptor,
3154 hash,
3155 ScopedNullHandle<mirror::ClassLoader>(),
3156 *pair.first,
3157 *pair.second);
3158 }
3159 if (*result == nullptr) {
3160 CHECK(self->IsExceptionPending()) << descriptor;
3161 FilterDexFileCaughtExceptions(self, this);
3162 }
3163 }
3164 // The boot classloader is always a known lookup.
3165 return true;
3166 }
3167
3168 bool ClassLinker::FindClassInBaseDexClassLoaderClassPath(
3169 Thread* self,
3170 const char* descriptor,
3171 size_t hash,
3172 Handle<mirror::ClassLoader> class_loader,
3173 /*out*/ ObjPtr<mirror::Class>* result) {
3174 DCHECK(IsPathOrDexClassLoader(class_loader) ||
3175 IsInMemoryDexClassLoader(class_loader) ||
3176 IsDelegateLastClassLoader(class_loader))
3177 << "Unexpected class loader for descriptor " << descriptor;
3178
3179 const DexFile* dex_file = nullptr;
3180 const dex::ClassDef* class_def = nullptr;
3181 ObjPtr<mirror::Class> ret;
3182 auto find_class_def = [&](const DexFile* cp_dex_file) REQUIRES_SHARED(Locks::mutator_lock_) {
3183 const dex::ClassDef* cp_class_def = OatDexFile::FindClassDef(*cp_dex_file, descriptor, hash);
3184 if (cp_class_def != nullptr) {
3185 dex_file = cp_dex_file;
3186 class_def = cp_class_def;
3187 return false; // Found a class definition, stop visit.
3188 }
3189 return true; // Continue with the next DexFile.
3190 };
3191 VisitClassLoaderDexFiles(self, class_loader, find_class_def);
3192
3193 if (class_def != nullptr) {
3194 *result = DefineClass(self, descriptor, hash, class_loader, *dex_file, *class_def);
3195 if (UNLIKELY(*result == nullptr)) {
3196 CHECK(self->IsExceptionPending()) << descriptor;
3197 FilterDexFileCaughtExceptions(self, this);
3198 } else {
3199 DCHECK(!self->IsExceptionPending());
3200 }
3201 }
3202 // A BaseDexClassLoader is always a known lookup.
3203 return true;
3204 }
3205
3206 ObjPtr<mirror::Class> ClassLinker::FindClass(Thread* self,
3207 const char* descriptor,
3208 Handle<mirror::ClassLoader> class_loader) {
3209 DCHECK_NE(*descriptor, '\0') << "descriptor is empty string";
3210 DCHECK(self != nullptr);
3211 self->AssertNoPendingException();
3212 self->PoisonObjectPointers(); // For DefineClass, CreateArrayClass, etc...
3213 if (descriptor[1] == '\0') {
3214 // Only the descriptors of primitive types should be 1 character long; this also avoids class
3215 // lookup for primitive classes that aren't backed by dex files.
3216 return FindPrimitiveClass(descriptor[0]);
3217 }
3218 const size_t hash = ComputeModifiedUtf8Hash(descriptor);
3219 // Find the class in the loaded classes table.
3220 ObjPtr<mirror::Class> klass = LookupClass(self, descriptor, hash, class_loader.Get());
3221 if (klass != nullptr) {
3222 return EnsureResolved(self, descriptor, klass);
3223 }
3224 // Class is not yet loaded.
3225 if (descriptor[0] != '[' && class_loader == nullptr) {
3226 // Non-array class and the boot class loader, search the boot class path.
3227 ClassPathEntry pair = FindInClassPath(descriptor, hash, boot_class_path_);
3228 if (pair.second != nullptr) {
3229 return DefineClass(self,
3230 descriptor,
3231 hash,
3232 ScopedNullHandle<mirror::ClassLoader>(),
3233 *pair.first,
3234 *pair.second);
3235 } else {
3236 // The boot class loader is searched ahead of the application class loader; failures are
3237 // expected and will be wrapped in a ClassNotFoundException. Use the pre-allocated error to
3238 // trigger the chaining with a proper stack trace.
3239 ObjPtr<mirror::Throwable> pre_allocated =
3240 Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
3241 self->SetException(pre_allocated);
3242 return nullptr;
3243 }
3244 }
3245 ObjPtr<mirror::Class> result_ptr;
3246 bool descriptor_equals;
3247 if (descriptor[0] == '[') {
3248 result_ptr = CreateArrayClass(self, descriptor, hash, class_loader);
3249 DCHECK_EQ(result_ptr == nullptr, self->IsExceptionPending());
3250 DCHECK(result_ptr == nullptr || result_ptr->DescriptorEquals(descriptor));
3251 descriptor_equals = true;
3252 } else {
3253 ScopedObjectAccessUnchecked soa(self);
3254 bool known_hierarchy =
3255 FindClassInBaseDexClassLoader(self, descriptor, hash, class_loader, &result_ptr);
3256 if (result_ptr != nullptr) {
3257 // The chain was understood and we found the class. We still need to add the class to
3258 // the class table to protect from racy programs that can try and redefine the path list
3259 // which would change the Class<?> returned for subsequent evaluation of const-class.
3260 DCHECK(known_hierarchy);
3261 DCHECK(result_ptr->DescriptorEquals(descriptor));
3262 descriptor_equals = true;
3263 } else if (!self->IsExceptionPending()) {
3264 // Either the chain wasn't understood or the class wasn't found.
3265 // If there is a pending exception we didn't clear, it is not a ClassNotFoundException and
3266 // we should return it instead of silently clearing and retrying.
3267 //
3268 // If the chain was understood but we did not find the class, let the Java-side
3269 // rediscover all this and throw the exception with the right stack trace. Note that
3270 // the Java-side could still succeed for racy programs if another thread is actively
3271 // modifying the class loader's path list.
3272
3273 // The runtime is not allowed to call into java from a runtime-thread so just abort.
3274 if (self->IsRuntimeThread()) {
3275 // Oops, we can't call into java so we can't run actual class-loader code.
3276 // This is true for e.g. for the compiler (jit or aot).
3277 ObjPtr<mirror::Throwable> pre_allocated =
3278 Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
3279 self->SetException(pre_allocated);
3280 return nullptr;
3281 }
3282
3283 // Inlined DescriptorToDot(descriptor) with extra validation.
3284 //
3285 // Throw NoClassDefFoundError early rather than potentially load a class only to fail
3286 // the DescriptorEquals() check below and give a confusing error message. For example,
3287 // when native code erroneously calls JNI GetFieldId() with signature "java/lang/String"
3288 // instead of "Ljava/lang/String;", the message below using the "dot" names would be
3289 // "class loader [...] returned class java.lang.String instead of java.lang.String".
3290 size_t descriptor_length = strlen(descriptor);
3291 if (UNLIKELY(descriptor[0] != 'L') ||
3292 UNLIKELY(descriptor[descriptor_length - 1] != ';') ||
3293 UNLIKELY(memchr(descriptor + 1, '.', descriptor_length - 2) != nullptr)) {
3294 ThrowNoClassDefFoundError("Invalid descriptor: %s.", descriptor);
3295 return nullptr;
3296 }
3297
3298 std::string class_name_string(descriptor + 1, descriptor_length - 2);
3299 std::replace(class_name_string.begin(), class_name_string.end(), '/', '.');
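// For example, the descriptor "Ljava/lang/String;" becomes the class name "java.lang.String"
// here, while malformed inputs such as "java/lang/String" (missing 'L' and ';') were already
// rejected above with a NoClassDefFoundError.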
3300 if (known_hierarchy &&
3301 fast_class_not_found_exceptions_ &&
3302 !Runtime::Current()->IsJavaDebuggable()) {
3303 // For known hierarchy, we know that the class is going to throw an exception. If we aren't
3304 // debuggable, optimize this path by throwing directly here without going back to Java
3305 // language. This reduces how many ClassNotFoundExceptions happen.
3306 self->ThrowNewExceptionF("Ljava/lang/ClassNotFoundException;",
3307 "%s",
3308 class_name_string.c_str());
3309 } else {
3310 StackHandleScope<1u> hs(self);
3311 Handle<mirror::String> class_name_object = hs.NewHandle(
3312 mirror::String::AllocFromModifiedUtf8(self, class_name_string.c_str()));
3313 if (class_name_object == nullptr) {
3314 DCHECK(self->IsExceptionPending()); // OOME.
3315 return nullptr;
3316 }
3317 DCHECK(class_loader != nullptr);
3318 result_ptr = ObjPtr<mirror::Class>::DownCast(
3319 WellKnownClasses::java_lang_ClassLoader_loadClass->InvokeVirtual<'L', 'L'>(
3320 self, class_loader.Get(), class_name_object.Get()));
3321 if (result_ptr == nullptr && !self->IsExceptionPending()) {
3322 // broken loader - throw NPE to be compatible with Dalvik
3323 ThrowNullPointerException(StringPrintf("ClassLoader.loadClass returned null for %s",
3324 class_name_string.c_str()).c_str());
3325 return nullptr;
3326 }
3327 // Check the name of the returned class.
3328 descriptor_equals = (result_ptr != nullptr) && result_ptr->DescriptorEquals(descriptor);
3329 }
3330 } else {
3331 DCHECK(!MatchesDexFileCaughtExceptions(self->GetException(), this));
3332 }
3333 }
3334
3335 if (self->IsExceptionPending()) {
3336 // If the ClassLoader threw or array class allocation failed, pass that exception up.
3337 // However, to comply with the RI behavior, first check if another thread succeeded.
3338 result_ptr = LookupClass(self, descriptor, hash, class_loader.Get());
3339 if (result_ptr != nullptr && !result_ptr->IsErroneous()) {
3340 self->ClearException();
3341 return EnsureResolved(self, descriptor, result_ptr);
3342 }
3343 return nullptr;
3344 }
3345
3346 // Try to insert the class to the class table, checking for mismatch.
3347 ObjPtr<mirror::Class> old;
3348 {
3349 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
3350 ClassTable* const class_table = InsertClassTableForClassLoader(class_loader.Get());
3351 old = class_table->Lookup(descriptor, hash);
3352 if (old == nullptr) {
3353 old = result_ptr; // For the comparison below, after releasing the lock.
3354 if (descriptor_equals) {
3355 class_table->InsertWithHash(result_ptr, hash);
3356 WriteBarrier::ForEveryFieldWrite(class_loader.Get());
3357 } // else throw below, after releasing the lock.
3358 }
3359 }
3360 if (UNLIKELY(old != result_ptr)) {
3361 // Return `old` (even if `!descriptor_equals`) to mimic the RI behavior for parallel
3362 // capable class loaders. (All class loaders are considered parallel capable on Android.)
3363 ObjPtr<mirror::Class> loader_class = class_loader->GetClass();
3364 const char* loader_class_name =
3365 loader_class->GetDexFile().GetTypeDescriptor(loader_class->GetDexTypeIndex());
3366 LOG(WARNING) << "Initiating class loader of type " << DescriptorToDot(loader_class_name)
3367 << " is not well-behaved; it returned a different Class for racing loadClass(\""
3368 << DescriptorToDot(descriptor) << "\").";
3369 return EnsureResolved(self, descriptor, old);
3370 }
3371 if (UNLIKELY(!descriptor_equals)) {
3372 std::string result_storage;
3373 const char* result_name = result_ptr->GetDescriptor(&result_storage);
3374 std::string loader_storage;
3375 const char* loader_class_name = class_loader->GetClass()->GetDescriptor(&loader_storage);
3376 ThrowNoClassDefFoundError(
3377 "Initiating class loader of type %s returned class %s instead of %s.",
3378 DescriptorToDot(loader_class_name).c_str(),
3379 DescriptorToDot(result_name).c_str(),
3380 DescriptorToDot(descriptor).c_str());
3381 return nullptr;
3382 }
3383 // Success.
3384 return result_ptr;
3385 }
3386
3387 // Helper for maintaining DefineClass counting. We need to notify callbacks when we start/end a
3388 // define-class and track how many recursive DefineClasses we are in, in order to allow for
3389 // things like pausing class definition.
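// Usage sketch: constructing a ScopedDefiningClass fires BeginDefineClass and increments the
// per-thread define-class count; every return path of DefineClass then goes through Finish(),
// which decrements the count and fires EndDefineClass. The destructor CHECKs that Finish()
// was called.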
3390 struct ScopedDefiningClass {
3391 public:
3392 explicit ScopedDefiningClass(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_)
3393 : self_(self), returned_(false) {
3394 Locks::mutator_lock_->AssertSharedHeld(self_);
3395 Runtime::Current()->GetRuntimeCallbacks()->BeginDefineClass();
3396 self_->IncrDefineClassCount();
3397 }
3398 ~ScopedDefiningClass() REQUIRES_SHARED(Locks::mutator_lock_) {
3399 Locks::mutator_lock_->AssertSharedHeld(self_);
3400 CHECK(returned_);
3401 }
3402
3403 ObjPtr<mirror::Class> Finish(Handle<mirror::Class> h_klass)
3404 REQUIRES_SHARED(Locks::mutator_lock_) {
3405 CHECK(!returned_);
3406 self_->DecrDefineClassCount();
3407 Runtime::Current()->GetRuntimeCallbacks()->EndDefineClass();
3408 Thread::PoisonObjectPointersIfDebug();
3409 returned_ = true;
3410 return h_klass.Get();
3411 }
3412
3413 ObjPtr<mirror::Class> Finish(ObjPtr<mirror::Class> klass)
3414 REQUIRES_SHARED(Locks::mutator_lock_) {
3415 StackHandleScope<1> hs(self_);
3416 Handle<mirror::Class> h_klass(hs.NewHandle(klass));
3417 return Finish(h_klass);
3418 }
3419
3420 ObjPtr<mirror::Class> Finish([[maybe_unused]] nullptr_t np)
3421 REQUIRES_SHARED(Locks::mutator_lock_) {
3422 ScopedNullHandle<mirror::Class> snh;
3423 return Finish(snh);
3424 }
3425
3426 private:
3427 Thread* self_;
3428 bool returned_;
3429 };
3430
3431 ObjPtr<mirror::Class> ClassLinker::DefineClass(Thread* self,
3432 const char* descriptor,
3433 size_t hash,
3434 Handle<mirror::ClassLoader> class_loader,
3435 const DexFile& dex_file,
3436 const dex::ClassDef& dex_class_def) {
3437 ScopedDefiningClass sdc(self);
3438 StackHandleScope<3> hs(self);
3439 metrics::AutoTimer timer{GetMetrics()->ClassLoadingTotalTime()};
3440 metrics::AutoTimer timeDelta{GetMetrics()->ClassLoadingTotalTimeDelta()};
3441 auto klass = hs.NewHandle<mirror::Class>(nullptr);
3442
3443 // Load the class from the dex file.
3444 if (UNLIKELY(!init_done_)) {
3445 // finish up init of hand crafted class_roots_
3446 if (strcmp(descriptor, "Ljava/lang/Object;") == 0) {
3447 klass.Assign(GetClassRoot<mirror::Object>(this));
3448 } else if (strcmp(descriptor, "Ljava/lang/Class;") == 0) {
3449 klass.Assign(GetClassRoot<mirror::Class>(this));
3450 } else if (strcmp(descriptor, "Ljava/lang/String;") == 0) {
3451 klass.Assign(GetClassRoot<mirror::String>(this));
3452 } else if (strcmp(descriptor, "Ljava/lang/ref/Reference;") == 0) {
3453 klass.Assign(GetClassRoot<mirror::Reference>(this));
3454 } else if (strcmp(descriptor, "Ljava/lang/DexCache;") == 0) {
3455 klass.Assign(GetClassRoot<mirror::DexCache>(this));
3456 } else if (strcmp(descriptor, "Ldalvik/system/ClassExt;") == 0) {
3457 klass.Assign(GetClassRoot<mirror::ClassExt>(this));
3458 }
3459 }
3460
3461 // For AOT-compilation of an app, we may use only a public SDK to resolve symbols. If the SDK
3462 // checks are configured (a non null SdkChecker) and the descriptor is not in the provided
3463 // public class path then we prevent the definition of the class.
3464 //
3465 // NOTE that we only do the checks for the boot classpath APIs. Anything else, like the app
3466 // classpath, is not checked.
3467 if (class_loader == nullptr &&
3468 Runtime::Current()->IsAotCompiler() &&
3469 DenyAccessBasedOnPublicSdk(descriptor)) {
3470 ObjPtr<mirror::Throwable> pre_allocated =
3471 Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
3472 self->SetException(pre_allocated);
3473 return sdc.Finish(nullptr);
3474 }
3475
3476 // This is to prevent calls to ClassLoad and ClassPrepare which can cause java/user-supplied
3477 // code to be executed. We put it up here so we can avoid all the allocations associated with
3478 // creating the class. This can happen with (e.g.) jit threads.
3479 if (!self->CanLoadClasses()) {
3480 // Make sure we don't try to load anything, potentially causing an infinite loop.
3481 ObjPtr<mirror::Throwable> pre_allocated =
3482 Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
3483 self->SetException(pre_allocated);
3484 return sdc.Finish(nullptr);
3485 }
3486
3487 ScopedTrace trace(descriptor);
3488 if (klass == nullptr) {
3489 // Allocate a class with the status of not ready.
3490 // An interface object should get the right size here. A regular class will
3491 // figure out the right size later and be replaced with one of the right
3492 // size when the class becomes resolved.
3493 if (CanAllocClass()) {
3494 klass.Assign(AllocClass(self, SizeOfClassWithoutEmbeddedTables(dex_file, dex_class_def)));
3495 } else {
3496 return sdc.Finish(nullptr);
3497 }
3498 }
3499 if (UNLIKELY(klass == nullptr)) {
3500 self->AssertPendingOOMException();
3501 return sdc.Finish(nullptr);
3502 }
3503 // Get the real dex file. This will return the input if there aren't any callbacks or they do
3504 // nothing.
3505 DexFile const* new_dex_file = nullptr;
3506 dex::ClassDef const* new_class_def = nullptr;
3507 // TODO We should ideally figure out some way to move this after we get a lock on the klass so it
3508 // will only be called once.
3509 Runtime::Current()->GetRuntimeCallbacks()->ClassPreDefine(descriptor,
3510 klass,
3511 class_loader,
3512 dex_file,
3513 dex_class_def,
3514 &new_dex_file,
3515 &new_class_def);
3516 // Check to see if an exception happened during runtime callbacks. Return if so.
3517 if (self->IsExceptionPending()) {
3518 return sdc.Finish(nullptr);
3519 }
3520 ObjPtr<mirror::DexCache> dex_cache = RegisterDexFile(*new_dex_file, class_loader.Get());
3521 if (dex_cache == nullptr) {
3522 self->AssertPendingException();
3523 return sdc.Finish(nullptr);
3524 }
3525 klass->SetDexCache(dex_cache);
3526 SetupClass(*new_dex_file, *new_class_def, klass, class_loader.Get());
3527
3528 // Mark the string class by setting its access flag.
3529 if (UNLIKELY(!init_done_)) {
3530 if (strcmp(descriptor, "Ljava/lang/String;") == 0) {
3531 klass->SetStringClass();
3532 }
3533 }
3534
3535 ObjectLock<mirror::Class> lock(self, klass);
3536 klass->SetClinitThreadId(self->GetTid());
3537 // Make sure we have a valid empty iftable even if there are errors.
3538 klass->SetIfTable(GetClassRoot<mirror::Object>(this)->GetIfTable());
3539
3540 // Add the newly loaded class to the loaded classes table.
3541 ObjPtr<mirror::Class> existing = InsertClass(descriptor, klass.Get(), hash);
3542 if (existing != nullptr) {
3543 // We failed to insert because we raced with another thread. Calling EnsureResolved may cause
3544 // this thread to block.
3545 return sdc.Finish(EnsureResolved(self, descriptor, existing));
3546 }
3547
3548 // Load the fields and other things after we are inserted in the table. This is so that we don't
3549 // end up allocating unfree-able linear alloc resources and then lose the race. The
3550 // other reason is that the field roots are only visited from the class table, so we need to be
3551 // inserted before we allocate / fill in these fields.
3552 LoadClass(self, *new_dex_file, *new_class_def, klass);
3553 if (self->IsExceptionPending()) {
3554 VLOG(class_linker) << self->GetException()->Dump();
3555 // An exception occurred during load; set status to erroneous while holding klass' lock in case
3556 // notification is necessary.
3557 if (!klass->IsErroneous()) {
3558 mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
3559 }
3560 return sdc.Finish(nullptr);
3561 }
3562
3563 // Finish loading (if necessary) by finding parents
3564 CHECK(!klass->IsLoaded());
3565 if (!LoadSuperAndInterfaces(klass, *new_dex_file)) {
3566 // Loading failed.
3567 if (!klass->IsErroneous()) {
3568 mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
3569 }
3570 return sdc.Finish(nullptr);
3571 }
3572 CHECK(klass->IsLoaded());
3573
3574 // At this point the class is loaded. Publish a ClassLoad event.
3575 // Note: this may be a temporary class. It is a listener's responsibility to handle this.
3576 Runtime::Current()->GetRuntimeCallbacks()->ClassLoad(klass);
3577
3578 // Link the class (if necessary)
3579 CHECK(!klass->IsResolved());
3580 // TODO: Use fast jobjects?
3581 auto interfaces = hs.NewHandle<mirror::ObjectArray<mirror::Class>>(nullptr);
3582
3583 MutableHandle<mirror::Class> h_new_class = hs.NewHandle<mirror::Class>(nullptr);
3584 if (!LinkClass(self, descriptor, klass, interfaces, &h_new_class)) {
3585 // Linking failed.
3586 if (!klass->IsErroneous()) {
3587 mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
3588 }
3589 return sdc.Finish(nullptr);
3590 }
3591 self->AssertNoPendingException();
3592 CHECK(h_new_class != nullptr) << descriptor;
3593 CHECK(h_new_class->IsResolved()) << descriptor << " " << h_new_class->GetStatus();
3594
3595 // Instrumentation may have updated entrypoints for all methods of all
3596 // classes. However it could not update methods of this class while we
3597 // were loading it. Now that the class is resolved, we can update entrypoints
3598 // as required by instrumentation.
3599 if (Runtime::Current()->GetInstrumentation()->EntryExitStubsInstalled()) {
3600 // We must be in the kRunnable state to prevent instrumentation from
3601 // suspending all threads to update entrypoints while we are doing it
3602 // for this class.
3603 DCHECK_EQ(self->GetState(), ThreadState::kRunnable);
3604 Runtime::Current()->GetInstrumentation()->InstallStubsForClass(h_new_class.Get());
3605 }
3606
3607 /*
3608 * We send CLASS_PREPARE events to the debugger from here. The
3609 * definition of "preparation" is creating the static fields for a
3610 * class and initializing them to the standard default values, but not
3611 * executing any code (that comes later, during "initialization").
3612 *
3613 * We did the static preparation in LinkClass.
3614 *
3615 * The class has been prepared and resolved but possibly not yet verified
3616 * at this point.
3617 */
3618 Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(klass, h_new_class);
3619
3620 // Notify native debugger of the new class and its layout.
3621 jit::Jit::NewTypeLoadedIfUsingJit(h_new_class.Get());
3622
3623 return sdc.Finish(h_new_class);
3624 }
3625
3626 uint32_t ClassLinker::SizeOfClassWithoutEmbeddedTables(const DexFile& dex_file,
3627 const dex::ClassDef& dex_class_def) {
3628 size_t num_ref = 0;
3629 size_t num_8 = 0;
3630 size_t num_16 = 0;
3631 size_t num_32 = 0;
3632 size_t num_64 = 0;
3633 ClassAccessor accessor(dex_file, dex_class_def);
3634 // We allow duplicate definitions of the same field in a class_data_item
3635 // but ignore the repeated indexes here, b/21868015.
3636 uint32_t last_field_idx = dex::kDexNoIndex;
3637 for (const ClassAccessor::Field& field : accessor.GetStaticFields()) {
3638 uint32_t field_idx = field.GetIndex();
3639 // Ordering enforced by DexFileVerifier.
3640 DCHECK(last_field_idx == dex::kDexNoIndex || last_field_idx <= field_idx);
3641 if (UNLIKELY(field_idx == last_field_idx)) {
3642 continue;
3643 }
3644 last_field_idx = field_idx;
3645 const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
3646 const char* descriptor = dex_file.GetFieldTypeDescriptor(field_id);
3647 char c = descriptor[0];
3648 switch (c) {
3649 case 'L':
3650 case '[':
3651 num_ref++;
3652 break;
3653 case 'J':
3654 case 'D':
3655 num_64++;
3656 break;
3657 case 'I':
3658 case 'F':
3659 num_32++;
3660 break;
3661 case 'S':
3662 case 'C':
3663 num_16++;
3664 break;
3665 case 'B':
3666 case 'Z':
3667 num_8++;
3668 break;
3669 default:
3670 LOG(FATAL) << "Unknown descriptor: " << c;
3671 UNREACHABLE();
3672 }
3673 }
3674 return mirror::Class::ComputeClassSize(false,
3675 0,
3676 num_8,
3677 num_16,
3678 num_32,
3679 num_64,
3680 num_ref,
3681 image_pointer_size_);
3682 }
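// Illustration (not part of the original source): for a class_def whose static
// fields are `long a; Object b; boolean c;`, the loop above sees the field type
// descriptors 'J', 'L' and 'Z' and tallies num_64 = 1, num_ref = 1 and
// num_8 = 1, so ComputeClassSize() reserves one 64-bit slot, one reference slot
// and one 8-bit slot for the embedded statics (plus any alignment padding).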
3683
3684 void ClassLinker::FixupStaticTrampolines(Thread* self, ObjPtr<mirror::Class> klass) {
3685 ScopedAssertNoThreadSuspension sants(__FUNCTION__);
3686 DCHECK(klass->IsVisiblyInitialized()) << klass->PrettyDescriptor();
3687 size_t num_direct_methods = klass->NumDirectMethods();
3688 if (num_direct_methods == 0) {
3689 return; // No direct methods => no static methods.
3690 }
3691 if (UNLIKELY(klass->IsProxyClass())) {
3692 return;
3693 }
3694 PointerSize pointer_size = image_pointer_size_;
3695 if (std::any_of(klass->GetDirectMethods(pointer_size).begin(),
3696 klass->GetDirectMethods(pointer_size).end(),
3697 [](const ArtMethod& m) { return m.IsCriticalNative(); })) {
3698 // Store registered @CriticalNative methods, if any, to JNI entrypoints.
3699 // Direct methods are a contiguous chunk of memory, so use the ordering of the map.
3700 ArtMethod* first_method = klass->GetDirectMethod(0u, pointer_size);
3701 ArtMethod* last_method = klass->GetDirectMethod(num_direct_methods - 1u, pointer_size);
3702 MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
3703 auto lb = critical_native_code_with_clinit_check_.lower_bound(first_method);
3704 while (lb != critical_native_code_with_clinit_check_.end() && lb->first <= last_method) {
3705 lb->first->SetEntryPointFromJni(lb->second);
3706 lb = critical_native_code_with_clinit_check_.erase(lb);
3707 }
3708 }
3709 Runtime* runtime = Runtime::Current();
3710 if (runtime->IsAotCompiler()) {
3711 // We should not update entrypoints when running the transactional
3712 // interpreter.
3713 return;
3714 }
3715
3716 instrumentation::Instrumentation* instrumentation = runtime->GetInstrumentation();
3717 bool enable_boot_jni_stub = !runtime->IsJavaDebuggable();
3718 for (size_t method_index = 0; method_index < num_direct_methods; ++method_index) {
3719 ArtMethod* method = klass->GetDirectMethod(method_index, pointer_size);
3720 if (method->NeedsClinitCheckBeforeCall()) {
3721 const void* quick_code = instrumentation->GetCodeForInvoke(method);
3722 if (method->IsNative() && IsQuickGenericJniStub(quick_code) && enable_boot_jni_stub) {
3723 const void* boot_jni_stub = FindBootJniStub(method);
3724 if (boot_jni_stub != nullptr) {
3725 // Use boot JNI stub if found.
3726 quick_code = boot_jni_stub;
3727 }
3728 }
3729 instrumentation->UpdateMethodsCode(method, quick_code);
3730 }
3731 }
3732 // Ignore virtual methods on the iterator.
3733 }
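// Sketch of the map-range idiom used above for @CriticalNative methods. This is
// illustrative only; `Method`, `pending` and `apply` are hypothetical stand-ins
// for ArtMethod, critical_native_code_with_clinit_check_ and the entrypoint update:
//   std::map<Method*, const void*> pending;            // Keyed by method pointer.
//   auto lb = pending.lower_bound(first);              // First key >= `first`.
//   while (lb != pending.end() && lb->first <= last) { // Keys within [first, last].
//     apply(lb->first, lb->second);                    // SetEntryPointFromJni() above.
//     lb = pending.erase(lb);                          // erase() returns the next entry.
//   }
// This works because a class' direct methods occupy one contiguous chunk of
// memory, so pointer comparison against [first_method, last_method] selects
// exactly the map entries that belong to this class.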
3734
3735 // Does whatever is needed to make sure that the compiler will not generate a direct invoke to
3736 // this method. Should only be called on non-invokable methods.
3737 inline void EnsureThrowsInvocationError(ClassLinker* class_linker, ArtMethod* method)
3738 REQUIRES_SHARED(Locks::mutator_lock_) {
3739 DCHECK(method != nullptr);
3740 DCHECK(!method->IsInvokable());
3741 method->SetEntryPointFromQuickCompiledCodePtrSize(
3742 class_linker->GetQuickToInterpreterBridgeTrampoline(),
3743 class_linker->GetImagePointerSize());
3744 }
3745
3746 class ClassLinker::OatClassCodeIterator {
3747 public:
3748 explicit OatClassCodeIterator(const OatFile::OatClass& oat_class)
3749 : begin_(oat_class.methods_pointer_ != nullptr && oat_class.oat_file_->IsExecutable()
3750 ? oat_class.oat_file_->Begin()
3751 : nullptr),
3752 bitmap_(oat_class.bitmap_),
3753 current_(oat_class.methods_pointer_ != nullptr && oat_class.oat_file_->IsExecutable()
3754 ? oat_class.methods_pointer_
3755 : nullptr),
3756 method_index_(0u),
3757 num_methods_(oat_class.num_methods_) {
3758 DCHECK_EQ(bitmap_ != nullptr, oat_class.GetType() == OatClassType::kSomeCompiled);
3759 }
3760
3761 const void* GetAndAdvance(uint32_t method_index) {
3762 if (kIsDebugBuild) {
3763 CHECK_EQ(method_index, method_index_);
3764 ++method_index_;
3765 }
3766 if (current_ == nullptr) {
3767 // We may not have a valid `num_methods_` to perform the next `DCHECK()`.
3768 return nullptr;
3769 }
3770 DCHECK_LT(method_index, num_methods_);
3771 DCHECK(begin_ != nullptr);
3772 if (bitmap_ == nullptr || BitVector::IsBitSet(bitmap_, method_index)) {
3773 DCHECK_NE(current_->code_offset_, 0u);
3774 const void* result = begin_ + current_->code_offset_;
3775 ++current_;
3776 return result;
3777 } else {
3778 return nullptr;
3779 }
3780 }
3781
3782 void SkipAbstract(uint32_t method_index) {
3783 if (kIsDebugBuild) {
3784 CHECK_EQ(method_index, method_index_);
3785 ++method_index_;
3786 if (current_ != nullptr) {
3787 CHECK_LT(method_index, num_methods_);
3788 CHECK(bitmap_ != nullptr);
3789 CHECK(!BitVector::IsBitSet(bitmap_, method_index));
3790 }
3791 }
3792 }
3793
3794 private:
3795 const uint8_t* const begin_;
3796 const uint32_t* const bitmap_;
3797 const OatMethodOffsets* current_;
3798
3799 // Debug mode members.
3800 uint32_t method_index_;
3801 const uint32_t num_methods_;
3802 };
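// Worked example (illustrative): for an OatClassType::kSomeCompiled class with
// three methods where only bits 0 and 2 of bitmap_ are set, the compiled-code
// table holds two OatMethodOffsets entries. GetAndAdvance(0) returns
// begin_ + code_offset of the first entry and advances current_,
// GetAndAdvance(1) returns nullptr without advancing, and GetAndAdvance(2)
// returns the code of the second entry. Abstract methods must go through
// SkipAbstract() instead, which only performs debug-mode bookkeeping.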
3803
3804 inline void ClassLinker::LinkCode(ArtMethod* method,
3805 uint32_t class_def_method_index,
3806 /*inout*/ OatClassCodeIterator* occi) {
3807 ScopedAssertNoThreadSuspension sants(__FUNCTION__);
3808 Runtime* const runtime = Runtime::Current();
3809 if (runtime->IsAotCompiler()) {
3810 // The following code only applies to a non-compiler runtime.
3811 return;
3812 }
3813
3814 // Method shouldn't have already been linked.
3815 DCHECK_EQ(method->GetEntryPointFromQuickCompiledCode(), nullptr);
3816 DCHECK(!method->GetDeclaringClass()->IsVisiblyInitialized()); // Actually ClassStatus::kIdx.
3817
3818 if (!method->IsInvokable()) {
3819 EnsureThrowsInvocationError(this, method);
3820 occi->SkipAbstract(class_def_method_index);
3821 return;
3822 }
3823
3824 const void* quick_code = occi->GetAndAdvance(class_def_method_index);
3825 if (method->IsNative() && quick_code == nullptr) {
3826 const void* boot_jni_stub = FindBootJniStub(method);
3827 if (boot_jni_stub != nullptr) {
3828 // Use boot JNI stub if found.
3829 quick_code = boot_jni_stub;
3830 }
3831 }
3832 runtime->GetInstrumentation()->InitializeMethodsCode(method, quick_code);
3833
3834 if (method->IsNative()) {
3835 // Set up the dlsym lookup stub. Do not go through `UnregisterNative()`
3836 // as the extra processing for @CriticalNative is not needed yet.
3837 method->SetEntryPointFromJni(
3838 method->IsCriticalNative() ? GetJniDlsymLookupCriticalStub() : GetJniDlsymLookupStub());
3839 }
3840 }
3841
3842 void ClassLinker::SetupClass(const DexFile& dex_file,
3843 const dex::ClassDef& dex_class_def,
3844 Handle<mirror::Class> klass,
3845 ObjPtr<mirror::ClassLoader> class_loader) {
3846 CHECK(klass != nullptr);
3847 CHECK(klass->GetDexCache() != nullptr);
3848 CHECK_EQ(ClassStatus::kNotReady, klass->GetStatus());
3849 const char* descriptor = dex_file.GetClassDescriptor(dex_class_def);
3850 CHECK(descriptor != nullptr);
3851
3852 klass->SetClass(GetClassRoot<mirror::Class>(this));
3853 uint32_t access_flags = dex_class_def.GetJavaAccessFlags();
3854 CHECK_EQ(access_flags & ~kAccJavaFlagsMask, 0U);
3855 klass->SetAccessFlagsDuringLinking(access_flags);
3856 klass->SetClassLoader(class_loader);
3857 DCHECK_EQ(klass->GetPrimitiveType(), Primitive::kPrimNot);
3858 mirror::Class::SetStatus(klass, ClassStatus::kIdx, nullptr);
3859
3860 klass->SetDexClassDefIndex(dex_file.GetIndexForClassDef(dex_class_def));
3861 klass->SetDexTypeIndex(dex_class_def.class_idx_);
3862 }
3863
3864 LengthPrefixedArray<ArtField>* ClassLinker::AllocArtFieldArray(Thread* self,
3865 LinearAlloc* allocator,
3866 size_t length) {
3867 if (length == 0) {
3868 return nullptr;
3869 }
3870 // If the ArtField alignment changes, review all uses of LengthPrefixedArray<ArtField>.
3871 static_assert(alignof(ArtField) == 4, "ArtField alignment is expected to be 4.");
3872 size_t storage_size = LengthPrefixedArray<ArtField>::ComputeSize(length);
3873 void* array_storage = allocator->Alloc(self, storage_size, LinearAllocKind::kArtFieldArray);
3874 auto* ret = new(array_storage) LengthPrefixedArray<ArtField>(length);
3875 CHECK(ret != nullptr);
3876 std::uninitialized_fill_n(&ret->At(0), length, ArtField());
3877 return ret;
3878 }
3879
3880 LengthPrefixedArray<ArtMethod>* ClassLinker::AllocArtMethodArray(Thread* self,
3881 LinearAlloc* allocator,
3882 size_t length) {
3883 if (length == 0) {
3884 return nullptr;
3885 }
3886 const size_t method_alignment = ArtMethod::Alignment(image_pointer_size_);
3887 const size_t method_size = ArtMethod::Size(image_pointer_size_);
3888 const size_t storage_size =
3889 LengthPrefixedArray<ArtMethod>::ComputeSize(length, method_size, method_alignment);
3890 void* array_storage = allocator->Alloc(self, storage_size, LinearAllocKind::kArtMethodArray);
3891 auto* ret = new (array_storage) LengthPrefixedArray<ArtMethod>(length);
3892 CHECK(ret != nullptr);
3893 for (size_t i = 0; i < length; ++i) {
3894 new(reinterpret_cast<void*>(&ret->At(i, method_size, method_alignment))) ArtMethod;
3895 }
3896 return ret;
3897 }
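// Illustrative note: both allocators above follow the same pattern -- compute
// the LengthPrefixedArray storage size, carve it out of the per-class-loader
// LinearAlloc, placement-new the length-prefixed header, and then construct the
// elements in place. The resulting layout is roughly (padding is an assumption
// about ComputeSize() internals):
//   [uint32_t length][padding][element 0][element 1]...[element length-1]
// where, for ArtMethod, the element stride is ArtMethod::Size() rounded up to
// ArtMethod::Alignment() for the current image pointer size.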
3898
3899 LinearAlloc* ClassLinker::GetAllocatorForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
3900 if (class_loader == nullptr) {
3901 return Runtime::Current()->GetLinearAlloc();
3902 }
3903 LinearAlloc* allocator = class_loader->GetAllocator();
3904 DCHECK(allocator != nullptr);
3905 return allocator;
3906 }
3907
3908 LinearAlloc* ClassLinker::GetOrCreateAllocatorForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
3909 if (class_loader == nullptr) {
3910 return Runtime::Current()->GetLinearAlloc();
3911 }
3912 WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
3913 LinearAlloc* allocator = class_loader->GetAllocator();
3914 if (allocator == nullptr) {
3915 RegisterClassLoader(class_loader);
3916 allocator = class_loader->GetAllocator();
3917 CHECK(allocator != nullptr);
3918 }
3919 return allocator;
3920 }
3921
3922 // Helper class for iterating over method annotations, using their ordering in the dex file.
3923 // Since direct and virtual methods are separated (but each section is ordered), we shall use
3924 // separate iterators for loading direct and virtual methods.
3925 class ClassLinker::MethodAnnotationsIterator {
3926 public:
3927 MethodAnnotationsIterator(const DexFile& dex_file,
3928 const dex::AnnotationsDirectoryItem* annotations_dir)
3929 : current_((annotations_dir != nullptr) ? dex_file.GetMethodAnnotations(annotations_dir)
3930 : nullptr),
3931 end_((annotations_dir != nullptr) ? current_ + annotations_dir->methods_size_ : nullptr) {}
3932
3933 const dex::MethodAnnotationsItem* AdvanceTo(uint32_t method_idx) {
3934 while (current_ != end_ && current_->method_idx_ < method_idx) {
3935 ++current_;
3936 }
3937 return (current_ != end_ && current_->method_idx_ == method_idx) ? current_ : nullptr;
3938 }
3939
3940 private:
3941 const dex::MethodAnnotationsItem* current_;
3942 const dex::MethodAnnotationsItem* const end_;
3943 };
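// Usage sketch (illustrative): with the method annotations sorted by method
// index, say entries for method_idx 3, 7 and 9, a single forward scan serves
// monotonically increasing queries:
//   MethodAnnotationsIterator mai(dex_file, annotations_dir);
//   mai.AdvanceTo(3);  // Returns the entry for 3.
//   mai.AdvanceTo(5);  // Returns nullptr (no entry for 5); stops at entry 7.
//   mai.AdvanceTo(7);  // Returns the entry for 7.
// Queries must never go backwards, which is why LoadClass() below keeps
// separate iterators for the direct and virtual method sections.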
3944
3945 void ClassLinker::LoadClass(Thread* self,
3946 const DexFile& dex_file,
3947 const dex::ClassDef& dex_class_def,
3948 Handle<mirror::Class> klass) {
3949 ClassAccessor accessor(dex_file,
3950 dex_class_def,
3951 /* parse_hiddenapi_class_data= */ klass->IsBootStrapClassLoaded());
3952 if (!accessor.HasClassData()) {
3953 return;
3954 }
3955 Runtime* const runtime = Runtime::Current();
3956 {
3957 // Note: We cannot have thread suspension until the field and method arrays are setup or else
3958 // Class::VisitFieldRoots may miss some fields or methods.
3959 ScopedAssertNoThreadSuspension nts(__FUNCTION__);
3960 // Load static fields.
3961 // We allow duplicate definitions of the same field in a class_data_item
3962 // but ignore the repeated indexes here, b/21868015.
3963 LinearAlloc* const allocator = GetAllocatorForClassLoader(klass->GetClassLoader());
3964 LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self,
3965 allocator,
3966 accessor.NumStaticFields());
3967 LengthPrefixedArray<ArtField>* ifields = AllocArtFieldArray(self,
3968 allocator,
3969 accessor.NumInstanceFields());
3970 size_t num_sfields = 0u;
3971 size_t num_ifields = 0u;
3972 uint32_t last_static_field_idx = 0u;
3973 uint32_t last_instance_field_idx = 0u;
3974
3975 // Methods
3976 bool has_oat_class = false;
3977 const OatFile::OatClass oat_class = (runtime->IsStarted() && !runtime->IsAotCompiler())
3978 ? OatFile::FindOatClass(dex_file, klass->GetDexClassDefIndex(), &has_oat_class)
3979 : OatFile::OatClass::Invalid();
3980 OatClassCodeIterator occi(oat_class);
3981 klass->SetMethodsPtr(
3982 AllocArtMethodArray(self, allocator, accessor.NumMethods()),
3983 accessor.NumDirectMethods(),
3984 accessor.NumVirtualMethods());
3985 size_t class_def_method_index = 0;
3986 uint32_t last_dex_method_index = dex::kDexNoIndex;
3987 size_t last_class_def_method_index = 0;
3988
3989 // Initialize separate `MethodAnnotationsIterator`s for direct and virtual methods.
3990 MethodAnnotationsIterator mai_direct(dex_file, dex_file.GetAnnotationsDirectory(dex_class_def));
3991 MethodAnnotationsIterator mai_virtual = mai_direct;
3992
3993 uint16_t hotness_threshold = runtime->GetJITOptions()->GetWarmupThreshold();
3994 // Use the visitor since the range-based loops are a bit slower due to seeking: seeking to the
3995 // methods requires decoding all of the fields first.
3996 accessor.VisitFieldsAndMethods([&](
3997 const ClassAccessor::Field& field) REQUIRES_SHARED(Locks::mutator_lock_) {
3998 uint32_t field_idx = field.GetIndex();
3999 DCHECK_GE(field_idx, last_static_field_idx); // Ordering enforced by DexFileVerifier.
4000 if (num_sfields == 0 || LIKELY(field_idx > last_static_field_idx)) {
4001 LoadField(field, klass, &sfields->At(num_sfields));
4002 ++num_sfields;
4003 last_static_field_idx = field_idx;
4004 }
4005 }, [&](const ClassAccessor::Field& field) REQUIRES_SHARED(Locks::mutator_lock_) {
4006 uint32_t field_idx = field.GetIndex();
4007 DCHECK_GE(field_idx, last_instance_field_idx); // Ordering enforced by DexFileVerifier.
4008 if (num_ifields == 0 || LIKELY(field_idx > last_instance_field_idx)) {
4009 LoadField(field, klass, &ifields->At(num_ifields));
4010 ++num_ifields;
4011 last_instance_field_idx = field_idx;
4012 }
4013 }, [&](const ClassAccessor::Method& method) REQUIRES_SHARED(Locks::mutator_lock_) {
4014 ArtMethod* art_method = klass->GetDirectMethodUnchecked(class_def_method_index,
4015 image_pointer_size_);
4016 LoadMethod(dex_file, method, klass.Get(), &mai_direct, art_method);
4017 LinkCode(art_method, class_def_method_index, &occi);
4018 uint32_t it_method_index = method.GetIndex();
4019 if (last_dex_method_index == it_method_index) {
4020 // Duplicate method: reuse the method index recorded for the first occurrence.
4021 art_method->SetMethodIndex(last_class_def_method_index);
4022 } else {
4023 art_method->SetMethodIndex(class_def_method_index);
4024 last_dex_method_index = it_method_index;
4025 last_class_def_method_index = class_def_method_index;
4026 }
4027 art_method->ResetCounter(hotness_threshold);
4028 ++class_def_method_index;
4029 }, [&](const ClassAccessor::Method& method) REQUIRES_SHARED(Locks::mutator_lock_) {
4030 ArtMethod* art_method = klass->GetVirtualMethodUnchecked(
4031 class_def_method_index - accessor.NumDirectMethods(),
4032 image_pointer_size_);
4033 art_method->ResetCounter(hotness_threshold);
4034 LoadMethod(dex_file, method, klass.Get(), &mai_virtual, art_method);
4035 LinkCode(art_method, class_def_method_index, &occi);
4036 ++class_def_method_index;
4037 });
4038
4039 if (UNLIKELY(num_ifields + num_sfields != accessor.NumFields())) {
4040 LOG(WARNING) << "Duplicate fields in class " << klass->PrettyDescriptor()
4041 << " (unique static fields: " << num_sfields << "/" << accessor.NumStaticFields()
4042 << ", unique instance fields: " << num_ifields << "/" << accessor.NumInstanceFields()
4043 << ")";
4044 // NOTE: Not shrinking the over-allocated sfields/ifields, just setting size.
4045 if (sfields != nullptr) {
4046 sfields->SetSize(num_sfields);
4047 }
4048 if (ifields != nullptr) {
4049 ifields->SetSize(num_ifields);
4050 }
4051 }
4052 // Set the field arrays.
4053 klass->SetSFieldsPtr(sfields);
4054 DCHECK_EQ(klass->NumStaticFields(), num_sfields);
4055 klass->SetIFieldsPtr(ifields);
4056 DCHECK_EQ(klass->NumInstanceFields(), num_ifields);
4057 }
4058 // Ensure that the card is marked so that remembered sets pick up native roots.
4059 WriteBarrier::ForEveryFieldWrite(klass.Get());
4060 self->AllowThreadSuspension();
4061 }
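// Illustration of the duplicate-method handling above (not part of the original
// source): if a class_data_item lists the same dex method index twice, e.g. the
// direct-method indexes 10, 11, 11, then each occurrence still gets its own
// ArtMethod slot, but the second 11 reuses the class_def_method_index recorded
// for the first one, so both ArtMethods report the same method index.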
4062
4063 void ClassLinker::LoadField(const ClassAccessor::Field& field,
4064 Handle<mirror::Class> klass,
4065 ArtField* dst) {
4066 const uint32_t field_idx = field.GetIndex();
4067 dst->SetDexFieldIndex(field_idx);
4068 dst->SetDeclaringClass(klass.Get());
4069
4070 // Get access flags from the DexFile and set hiddenapi runtime access flags.
4071 dst->SetAccessFlags(field.GetAccessFlags() | hiddenapi::CreateRuntimeFlags(field));
4072 }
4073
4074 void ClassLinker::LoadMethod(const DexFile& dex_file,
4075 const ClassAccessor::Method& method,
4076 ObjPtr<mirror::Class> klass,
4077 /*inout*/ MethodAnnotationsIterator* mai,
4078 /*out*/ ArtMethod* dst) {
4079 ScopedAssertNoThreadSuspension sants(__FUNCTION__);
4080
4081 const uint32_t dex_method_idx = method.GetIndex();
4082 const dex::MethodId& method_id = dex_file.GetMethodId(dex_method_idx);
4083 uint32_t name_utf16_length;
4084 const char* method_name = dex_file.GetStringDataAndUtf16Length(method_id.name_idx_,
4085 &name_utf16_length);
4086 std::string_view shorty = dex_file.GetShortyView(dex_file.GetProtoId(method_id.proto_idx_));
4087
4088 dst->SetDexMethodIndex(dex_method_idx);
4089 dst->SetDeclaringClass(klass);
4090
4091 // Get access flags from the DexFile and set hiddenapi runtime access flags.
4092 uint32_t access_flags = method.GetAccessFlags() | hiddenapi::CreateRuntimeFlags(method);
4093
4094 auto has_ascii_name = [method_name, name_utf16_length](const char* ascii_name,
4095 size_t length) ALWAYS_INLINE {
4096 DCHECK_EQ(strlen(ascii_name), length);
4097 return length == name_utf16_length &&
4098 method_name[length] == 0 && // Is `method_name` an ASCII string?
4099 memcmp(ascii_name, method_name, length) == 0;
4100 };
4101 if (UNLIKELY(has_ascii_name("finalize", sizeof("finalize") - 1u))) {
4102 // Set finalizable flag on declaring class if the method has the right signature.
4103 // When initializing without a boot image, `Object` and `Enum` shall have the finalizable
4104 // flag cleared immediately after loading these classes, see `InitWithoutImage()`.
4105 if (shorty == "V") {
4106 klass->SetFinalizable();
4107 }
4108 } else if (method_name[0] == '<') {
4109 // Fix broken access flags for initializers. Bug 11157540.
4110 bool is_init = has_ascii_name("<init>", sizeof("<init>") - 1u);
4111 bool is_clinit = has_ascii_name("<clinit>", sizeof("<clinit>") - 1u);
4112 if (UNLIKELY(!is_init && !is_clinit)) {
4113 LOG(WARNING) << "Unexpected '<' at start of method name " << method_name;
4114 } else {
4115 if (UNLIKELY((access_flags & kAccConstructor) == 0)) {
4116 LOG(WARNING) << method_name << " didn't have expected constructor access flag in class "
4117 << klass->PrettyDescriptor() << " in dex file " << dex_file.GetLocation();
4118 access_flags |= kAccConstructor;
4119 }
4120 }
4121 }
4122
4123 access_flags |= GetNterpFastPathFlags(shorty, access_flags, kRuntimeISA);
4124
4125 if (UNLIKELY((access_flags & kAccNative) != 0u)) {
4126 // Check if the native method is annotated with @FastNative or @CriticalNative.
4127 const dex::MethodAnnotationsItem* method_annotations = mai->AdvanceTo(dex_method_idx);
4128 if (method_annotations != nullptr) {
4129 access_flags |=
4130 annotations::GetNativeMethodAnnotationAccessFlags(dex_file, *method_annotations);
4131 }
4132 dst->SetAccessFlags(access_flags);
4133 DCHECK(!dst->IsAbstract());
4134 DCHECK(!dst->HasCodeItem());
4135 DCHECK_EQ(method.GetCodeItemOffset(), 0u);
4136 dst->SetDataPtrSize(nullptr, image_pointer_size_); // JNI stub/trampoline not linked yet.
4137 } else if ((access_flags & kAccAbstract) != 0u) {
4138 dst->SetAccessFlags(access_flags);
4139 // Must be done after SetAccessFlags since IsAbstract depends on it.
4140 DCHECK(dst->IsAbstract());
4141 if (klass->IsInterface()) {
4142 dst->CalculateAndSetImtIndex();
4143 }
4144 DCHECK(!dst->HasCodeItem());
4145 DCHECK_EQ(method.GetCodeItemOffset(), 0u);
4146 dst->SetDataPtrSize(nullptr, image_pointer_size_); // Single implementation not set yet.
4147 } else {
4148 const dex::MethodAnnotationsItem* method_annotations = mai->AdvanceTo(dex_method_idx);
4149 if (method_annotations != nullptr &&
4150 annotations::MethodIsNeverCompile(dex_file, *method_annotations)) {
4151 access_flags |= kAccCompileDontBother;
4152 }
4153 dst->SetAccessFlags(access_flags);
4154 DCHECK(!dst->IsAbstract());
4155 DCHECK(dst->HasCodeItem());
4156 uint32_t code_item_offset = method.GetCodeItemOffset();
4157 DCHECK_NE(code_item_offset, 0u);
4158 if (Runtime::Current()->IsAotCompiler()) {
4159 dst->SetDataPtrSize(reinterpret_cast32<void*>(code_item_offset), image_pointer_size_);
4160 } else {
4161 dst->SetCodeItem(dex_file.GetCodeItem(code_item_offset), dex_file.IsCompactDexFile());
4162 }
4163 }
4164
4165 if ((access_flags & kAccAbstract) == 0u &&
4166 Runtime::Current()->IsZygote() &&
4167 !Runtime::Current()->GetJITOptions()->GetProfileSaverOptions().GetProfileBootClassPath()) {
4168 DCHECK(!ArtMethod::IsAbstract(access_flags));
4169 DCHECK(!ArtMethod::IsIntrinsic(access_flags));
4170 dst->SetMemorySharedMethod();
4171 dst->SetHotCounter();
4172 }
4173 }
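// Illustration of the initializer fix above (hypothetical input): a dex file
// produced by a buggy tool may declare "<init>" without kAccConstructor. For
// such a method, LoadMethod() logs a warning and adds the missing flag so that
// later checks based on kAccConstructor still classify it as a constructor.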
4174
4175 void ClassLinker::AppendToBootClassPath(Thread* self, const DexFile* dex_file) {
4176 ObjPtr<mirror::DexCache> dex_cache =
4177 AllocAndInitializeDexCache(self, *dex_file, /* class_loader= */ nullptr);
4178 CHECK(dex_cache != nullptr) << "Failed to allocate dex cache for " << dex_file->GetLocation();
4179 AppendToBootClassPath(dex_file, dex_cache);
4180 WriteBarrierOnClassLoader(self, /*class_loader=*/nullptr, dex_cache);
4181 }
4182
4183 void ClassLinker::AppendToBootClassPath(const DexFile* dex_file,
4184 ObjPtr<mirror::DexCache> dex_cache) {
4185 CHECK(dex_file != nullptr);
4186 CHECK(dex_cache != nullptr) << dex_file->GetLocation();
4187 CHECK_EQ(dex_cache->GetDexFile(), dex_file) << dex_file->GetLocation();
4188 boot_class_path_.push_back(dex_file);
4189 WriterMutexLock mu(Thread::Current(), *Locks::dex_lock_);
4190 RegisterDexFileLocked(*dex_file, dex_cache, /* class_loader= */ nullptr);
4191 }
4192
4193 void ClassLinker::RegisterDexFileLocked(const DexFile& dex_file,
4194 ObjPtr<mirror::DexCache> dex_cache,
4195 ObjPtr<mirror::ClassLoader> class_loader) {
4196 Thread* const self = Thread::Current();
4197 Locks::dex_lock_->AssertExclusiveHeld(self);
4198 CHECK(dex_cache != nullptr) << dex_file.GetLocation();
4199 CHECK_EQ(dex_cache->GetDexFile(), &dex_file) << dex_file.GetLocation();
4200 // For app images, the dex cache location may be a suffix of the dex file location since the
4201 // dex file location is an absolute path.
4202 const std::string dex_cache_location = dex_cache->GetLocation()->ToModifiedUtf8();
4203 const size_t dex_cache_length = dex_cache_location.length();
4204 CHECK_GT(dex_cache_length, 0u) << dex_file.GetLocation();
4205 std::string dex_file_location = dex_file.GetLocation();
4206 // The following path checks don't work during preopt when using boot dex files, where the dex
4207 // cache location is the one on device, and the dex_file's location is the one on host.
4208 Runtime* runtime = Runtime::Current();
4209 if (!(runtime->IsAotCompiler() && class_loader == nullptr && !kIsTargetBuild)) {
4210 CHECK_GE(dex_file_location.length(), dex_cache_length)
4211 << dex_cache_location << " " << dex_file.GetLocation();
4212 const std::string dex_file_suffix = dex_file_location.substr(
4213 dex_file_location.length() - dex_cache_length,
4214 dex_cache_length);
4215 // Example dex_cache location is SettingsProvider.apk and
4216 // dex file location is /system/priv-app/SettingsProvider/SettingsProvider.apk
4217 CHECK_EQ(dex_cache_location, dex_file_suffix);
4218 }
4219
4220 // Check if we need to initialize OatFile data (.data.img.rel.ro and .bss
4221 // sections) needed for code execution and register the oat code range.
4222 const OatFile* oat_file =
4223 (dex_file.GetOatDexFile() != nullptr) ? dex_file.GetOatDexFile()->GetOatFile() : nullptr;
4224 bool initialize_oat_file_data = (oat_file != nullptr) && oat_file->IsExecutable();
4225 if (initialize_oat_file_data) {
4226 for (const auto& entry : dex_caches_) {
4227 if (!self->IsJWeakCleared(entry.second.weak_root) &&
4228 entry.first->GetOatDexFile() != nullptr &&
4229 entry.first->GetOatDexFile()->GetOatFile() == oat_file) {
4230 initialize_oat_file_data = false; // Already initialized.
4231 break;
4232 }
4233 }
4234 }
4235 if (initialize_oat_file_data) {
4236 oat_file->InitializeRelocations();
4237 // Notify the fault handler about the new executable code range if needed.
4238 size_t exec_offset = oat_file->GetOatHeader().GetExecutableOffset();
4239 DCHECK_LE(exec_offset, oat_file->Size());
4240 size_t exec_size = oat_file->Size() - exec_offset;
4241 if (exec_size != 0u) {
4242 runtime->AddGeneratedCodeRange(oat_file->Begin() + exec_offset, exec_size);
4243 }
4244 }
4245
4246 // Let hiddenapi assign a domain to the newly registered dex file.
4247 hiddenapi::InitializeDexFileDomain(dex_file, class_loader);
4248
4249 jweak dex_cache_jweak = self->GetJniEnv()->GetVm()->AddWeakGlobalRef(self, dex_cache);
4250 DexCacheData data;
4251 data.weak_root = dex_cache_jweak;
4252 data.class_table = ClassTableForClassLoader(class_loader);
4253 AddNativeDebugInfoForDex(self, &dex_file);
4254 DCHECK(data.class_table != nullptr);
4255 // Make sure to hold the dex cache live in the class table. This case happens for the boot class
4256 // path dex caches without an image.
4257 data.class_table->InsertStrongRoot(dex_cache);
4258 // Make sure that the dex cache holds the classloader live.
4259 dex_cache->SetClassLoader(class_loader);
4260 if (class_loader != nullptr) {
4261 // Since we added a strong root to the class table, do the write barrier as required for
4262 // remembered sets and generational GCs.
4263 WriteBarrier::ForEveryFieldWrite(class_loader);
4264 }
4265 bool inserted = dex_caches_.emplace(&dex_file, std::move(data)).second;
4266 CHECK(inserted);
4267 }
4268
4269 ObjPtr<mirror::DexCache> ClassLinker::DecodeDexCacheLocked(Thread* self, const DexCacheData* data) {
4270 return data != nullptr
4271 ? ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data->weak_root))
4272 : nullptr;
4273 }
4274
4275 bool ClassLinker::IsSameClassLoader(
4276 ObjPtr<mirror::DexCache> dex_cache,
4277 const DexCacheData* data,
4278 ObjPtr<mirror::ClassLoader> class_loader) {
4279 CHECK(data != nullptr);
4280 DCHECK_EQ(FindDexCacheDataLocked(*dex_cache->GetDexFile()), data);
4281 return data->class_table == ClassTableForClassLoader(class_loader);
4282 }
4283
4284 void ClassLinker::RegisterExistingDexCache(ObjPtr<mirror::DexCache> dex_cache,
4285 ObjPtr<mirror::ClassLoader> class_loader) {
4286 SCOPED_TRACE << __FUNCTION__ << " " << dex_cache->GetDexFile()->GetLocation();
4287 Thread* self = Thread::Current();
4288 StackHandleScope<2> hs(self);
4289 Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(dex_cache));
4290 Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(class_loader));
4291 const DexFile* dex_file = dex_cache->GetDexFile();
4292 DCHECK(dex_file != nullptr) << "Attempt to register uninitialized dex_cache object!";
4293 if (kIsDebugBuild) {
4294 ReaderMutexLock mu(self, *Locks::dex_lock_);
4295 const DexCacheData* old_data = FindDexCacheDataLocked(*dex_file);
4296 ObjPtr<mirror::DexCache> old_dex_cache = DecodeDexCacheLocked(self, old_data);
4297 DCHECK(old_dex_cache.IsNull()) << "Attempt to manually register a dex cache that's already "
4298 << "been registered on dex file " << dex_file->GetLocation();
4299 }
4300 ClassTable* table;
4301 {
4302 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
4303 table = InsertClassTableForClassLoader(h_class_loader.Get());
4304 }
4305 // Avoid a deadlock between a garbage collecting thread running a checkpoint,
4306 // a thread holding the dex lock and blocking on a condition variable regarding
4307 // weak references access, and a thread blocking on the dex lock.
4308 gc::ScopedGCCriticalSection gcs(self, gc::kGcCauseClassLinker, gc::kCollectorTypeClassLinker);
4309 WriterMutexLock mu(self, *Locks::dex_lock_);
4310 RegisterDexFileLocked(*dex_file, h_dex_cache.Get(), h_class_loader.Get());
4311 table->InsertStrongRoot(h_dex_cache.Get());
4312 if (h_class_loader.Get() != nullptr) {
4313 // Since we added a strong root to the class table, do the write barrier as required for
4314 // remembered sets and generational GCs.
4315 WriteBarrier::ForEveryFieldWrite(h_class_loader.Get());
4316 }
4317 }
4318
4319 static void ThrowDexFileAlreadyRegisteredError(Thread* self, const DexFile& dex_file)
4320 REQUIRES_SHARED(Locks::mutator_lock_) {
4321 self->ThrowNewExceptionF("Ljava/lang/InternalError;",
4322 "Attempt to register dex file %s with multiple class loaders",
4323 dex_file.GetLocation().c_str());
4324 }
4325
4326 void ClassLinker::WriteBarrierOnClassLoaderLocked(ObjPtr<mirror::ClassLoader> class_loader,
4327 ObjPtr<mirror::Object> root) {
4328 if (class_loader != nullptr) {
4329 // Since we added a strong root to the class table, do the write barrier as required for
4330 // remembered sets and generational GCs.
4331 WriteBarrier::ForEveryFieldWrite(class_loader);
4332 } else if (log_new_roots_) {
4333 new_roots_.push_back(GcRoot<mirror::Object>(root));
4334 }
4335 }
4336
4337 void ClassLinker::WriteBarrierOnClassLoader(Thread* self,
4338 ObjPtr<mirror::ClassLoader> class_loader,
4339 ObjPtr<mirror::Object> root) {
4340 if (class_loader != nullptr) {
4341 // Since we added a strong root to the class table, do the write barrier as required for
4342 // remembered sets and generational GCs.
4343 WriteBarrier::ForEveryFieldWrite(class_loader);
4344 } else {
4345 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
4346 if (log_new_roots_) {
4347 new_roots_.push_back(GcRoot<mirror::Object>(root));
4348 }
4349 }
4350 }
4351
4352 ObjPtr<mirror::DexCache> ClassLinker::RegisterDexFile(const DexFile& dex_file,
4353 ObjPtr<mirror::ClassLoader> class_loader) {
4354 Thread* self = Thread::Current();
4355 ObjPtr<mirror::DexCache> old_dex_cache;
4356 bool registered_with_another_class_loader = false;
4357 {
4358 ReaderMutexLock mu(self, *Locks::dex_lock_);
4359 const DexCacheData* old_data = FindDexCacheDataLocked(dex_file);
4360 old_dex_cache = DecodeDexCacheLocked(self, old_data);
4361 if (old_dex_cache != nullptr) {
4362 if (IsSameClassLoader(old_dex_cache, old_data, class_loader)) {
4363 return old_dex_cache;
4364 } else {
4365 // TODO: This is not very clean. Consider adding a way to request that exceptions be thrown
4366 // once it is safe to do so, to simplify this.
4367 registered_with_another_class_loader = true;
4368 }
4369 }
4370 }
4371 // We need to have released the dex_lock_ to allocate safely.
4372 if (registered_with_another_class_loader) {
4373 ThrowDexFileAlreadyRegisteredError(self, dex_file);
4374 return nullptr;
4375 }
4376 SCOPED_TRACE << __FUNCTION__ << " " << dex_file.GetLocation();
4377 LinearAlloc* const linear_alloc = GetOrCreateAllocatorForClassLoader(class_loader);
4378 DCHECK(linear_alloc != nullptr);
4379 ClassTable* table;
4380 {
4381 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
4382 table = InsertClassTableForClassLoader(class_loader);
4383 }
4384 // Don't alloc while holding the lock, since allocation may need to
4385 // suspend all threads and another thread may need the dex_lock_ to
4386 // get to a suspend point.
4387 StackHandleScope<3> hs(self);
4388 Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(class_loader));
4389 Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(AllocDexCache(self, dex_file)));
4390 {
4391 // Avoid a deadlock between a garbage collecting thread running a checkpoint,
4392 // a thread holding the dex lock and blocking on a condition variable regarding
4393 // weak references access, and a thread blocking on the dex lock.
4394 gc::ScopedGCCriticalSection gcs(self, gc::kGcCauseClassLinker, gc::kCollectorTypeClassLinker);
4395 WriterMutexLock mu(self, *Locks::dex_lock_);
4396 const DexCacheData* old_data = FindDexCacheDataLocked(dex_file);
4397 old_dex_cache = DecodeDexCacheLocked(self, old_data);
4398 if (old_dex_cache == nullptr && h_dex_cache != nullptr) {
4399 // Do Initialize while holding dex lock to make sure two threads don't call it
4400 // at the same time with the same dex cache. Since the .bss is shared, this could otherwise
4401 // trigger a failing DCHECK that the arrays are null.
4402 h_dex_cache->Initialize(&dex_file, h_class_loader.Get());
4403 RegisterDexFileLocked(dex_file, h_dex_cache.Get(), h_class_loader.Get());
4404 }
4405 if (old_dex_cache != nullptr) {
4406 // Another thread managed to initialize the dex cache faster, so use that DexCache.
4407 // If this thread encountered OOME, ignore it.
4408 DCHECK_EQ(h_dex_cache == nullptr, self->IsExceptionPending());
4409 self->ClearException();
4410 // We cannot call EnsureSameClassLoader() or allocate an exception while holding the
4411 // dex_lock_.
4412 if (IsSameClassLoader(old_dex_cache, old_data, h_class_loader.Get())) {
4413 return old_dex_cache;
4414 } else {
4415 registered_with_another_class_loader = true;
4416 }
4417 }
4418 }
4419 if (registered_with_another_class_loader) {
4420 ThrowDexFileAlreadyRegisteredError(self, dex_file);
4421 return nullptr;
4422 }
4423 if (h_dex_cache == nullptr) {
4424 self->AssertPendingOOMException();
4425 return nullptr;
4426 }
4427 if (table->InsertStrongRoot(h_dex_cache.Get())) {
4428 WriteBarrierOnClassLoader(self, h_class_loader.Get(), h_dex_cache.Get());
4429 } else {
4430 // Write-barrier not required if strong-root isn't inserted.
4431 }
4432 VLOG(class_linker) << "Registered dex file " << dex_file.GetLocation();
4433 PaletteNotifyDexFileLoaded(dex_file.GetLocation().c_str());
4434 return h_dex_cache.Get();
4435 }
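// Race sketch (illustrative): two threads registering the same dex file can
// both miss in the first FindDexCacheDataLocked() lookup and both allocate a
// DexCache. Under dex_lock_, only the first thread sees old_dex_cache == null
// and registers its cache; the second observes the winner's cache, discards its
// own allocation, and either returns the existing cache (same class loader) or
// reports the already-registered error (different class loader).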
4436
4437 bool ClassLinker::IsDexFileRegistered(Thread* self, const DexFile& dex_file) {
4438 ReaderMutexLock mu(self, *Locks::dex_lock_);
4439 return DecodeDexCacheLocked(self, FindDexCacheDataLocked(dex_file)) != nullptr;
4440 }
4441
4442 ObjPtr<mirror::DexCache> ClassLinker::FindDexCache(Thread* self, const DexFile& dex_file) {
4443 ReaderMutexLock mu(self, *Locks::dex_lock_);
4444 const DexCacheData* dex_cache_data = FindDexCacheDataLocked(dex_file);
4445 ObjPtr<mirror::DexCache> dex_cache = DecodeDexCacheLocked(self, dex_cache_data);
4446 if (dex_cache != nullptr) {
4447 return dex_cache;
4448 }
4449 // Failure, dump diagnostic and abort.
4450 for (const auto& entry : dex_caches_) {
4451 const DexCacheData& data = entry.second;
4452 if (DecodeDexCacheLocked(self, &data) != nullptr) {
4453 LOG(FATAL_WITHOUT_ABORT) << "Registered dex file " << entry.first->GetLocation();
4454 }
4455 }
4456 LOG(FATAL) << "Failed to find DexCache for DexFile " << dex_file.GetLocation()
4457 << " " << &dex_file;
4458 UNREACHABLE();
4459 }
4460
4461 ObjPtr<mirror::DexCache> ClassLinker::FindDexCache(Thread* self, const OatDexFile& oat_dex_file) {
4462 ReaderMutexLock mu(self, *Locks::dex_lock_);
4463 const DexCacheData* dex_cache_data = FindDexCacheDataLocked(oat_dex_file);
4464 ObjPtr<mirror::DexCache> dex_cache = DecodeDexCacheLocked(self, dex_cache_data);
4465 if (dex_cache != nullptr) {
4466 return dex_cache;
4467 }
4468 // Failure, dump diagnostic and abort.
4469 if (dex_cache_data == nullptr) {
4470 LOG(FATAL_WITHOUT_ABORT) << "NULL dex_cache_data";
4471 } else {
4472 LOG(FATAL_WITHOUT_ABORT)
4473 << "dex_cache_data=" << dex_cache_data
4474 << " weak_root=" << dex_cache_data->weak_root
4475 << " decoded_weak_root=" << self->DecodeJObject(dex_cache_data->weak_root);
4476 }
4477 for (const auto& entry : dex_caches_) {
4478 const DexCacheData& data = entry.second;
4479 if (DecodeDexCacheLocked(self, &data) != nullptr) {
4480 const OatDexFile* other_oat_dex_file = entry.first->GetOatDexFile();
4481 const OatFile* oat_file =
4482 (other_oat_dex_file == nullptr) ? nullptr : other_oat_dex_file->GetOatFile();
4483 LOG(FATAL_WITHOUT_ABORT)
4484 << "Registered dex file " << entry.first->GetLocation()
4485 << " oat_dex_file=" << other_oat_dex_file
4486 << " oat_file=" << oat_file
4487 << " oat_location=" << (oat_file == nullptr ? "null" : oat_file->GetLocation())
4488 << " dex_file=" << &entry.first
4489 << " weak_root=" << data.weak_root
4490 << " decoded_weak_root=" << self->DecodeJObject(data.weak_root)
4491 << " dex_cache_data=" << &data;
4492 }
4493 }
4494 LOG(FATAL) << "Failed to find DexCache for OatDexFile "
4495 << oat_dex_file.GetDexFileLocation()
4496 << " oat_dex_file=" << &oat_dex_file
4497 << " oat_file=" << oat_dex_file.GetOatFile()
4498 << " oat_location=" << oat_dex_file.GetOatFile()->GetLocation();
4499 UNREACHABLE();
4500 }
4501
4502 ClassTable* ClassLinker::FindClassTable(Thread* self, ObjPtr<mirror::DexCache> dex_cache) {
4503 const DexFile* dex_file = dex_cache->GetDexFile();
4504 DCHECK(dex_file != nullptr);
4505 ReaderMutexLock mu(self, *Locks::dex_lock_);
4506 auto it = dex_caches_.find(dex_file);
4507 if (it != dex_caches_.end()) {
4508 const DexCacheData& data = it->second;
4509 ObjPtr<mirror::DexCache> registered_dex_cache = DecodeDexCacheLocked(self, &data);
4510 if (registered_dex_cache != nullptr) {
4511 CHECK_EQ(registered_dex_cache, dex_cache) << dex_file->GetLocation();
4512 return data.class_table;
4513 }
4514 }
4515 return nullptr;
4516 }
4517
4518 const ClassLinker::DexCacheData* ClassLinker::FindDexCacheDataLocked(
4519 const OatDexFile& oat_dex_file) {
4520 auto it = std::find_if(dex_caches_.begin(), dex_caches_.end(), [&](const auto& entry) {
4521 return entry.first->GetOatDexFile() == &oat_dex_file;
4522 });
4523 return it != dex_caches_.end() ? &it->second : nullptr;
4524 }
4525
4526 const ClassLinker::DexCacheData* ClassLinker::FindDexCacheDataLocked(const DexFile& dex_file) {
4527 auto it = dex_caches_.find(&dex_file);
4528 return it != dex_caches_.end() ? &it->second : nullptr;
4529 }
4530
4531 void ClassLinker::CreatePrimitiveClass(Thread* self,
4532 Primitive::Type type,
4533 ClassRoot primitive_root) {
4534 ObjPtr<mirror::Class> primitive_class =
4535 AllocClass(self, mirror::Class::PrimitiveClassSize(image_pointer_size_));
4536 CHECK(primitive_class != nullptr) << "OOM for primitive class " << type;
4537 // Do not hold lock on the primitive class object, the initialization of
4538 // primitive classes is done while the process is still single threaded.
4539 primitive_class->SetAccessFlagsDuringLinking(kAccPublic | kAccFinal | kAccAbstract);
4540 primitive_class->SetPrimitiveType(type);
4541 primitive_class->SetIfTable(GetClassRoot<mirror::Object>(this)->GetIfTable());
4542 DCHECK_EQ(primitive_class->NumMethods(), 0u);
4543 // Primitive classes are initialized during single threaded startup, so visibly initialized.
4544 primitive_class->SetStatusForPrimitiveOrArray(ClassStatus::kVisiblyInitialized);
4545 const char* descriptor = Primitive::Descriptor(type);
4546 ObjPtr<mirror::Class> existing = InsertClass(descriptor,
4547 primitive_class,
4548 ComputeModifiedUtf8Hash(descriptor));
4549 CHECK(existing == nullptr) << "InitPrimitiveClass(" << type << ") failed";
4550 SetClassRoot(primitive_root, primitive_class);
4551 }
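// Usage sketch (illustrative): during startup without a boot image, the runtime
// calls this once per primitive type, e.g.
//   CreatePrimitiveClass(self, Primitive::kPrimInt, ClassRoot::kPrimitiveInt);
// which allocates the class, marks it visibly initialized, and records it both
// in the boot class table (under the descriptor "I") and as a class root.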
4552
4553 inline ObjPtr<mirror::IfTable> ClassLinker::GetArrayIfTable() {
4554 return GetClassRoot<mirror::ObjectArray<mirror::Object>>(this)->GetIfTable();
4555 }
4556
4557 // Create an array class (i.e. the class object for the array, not the
4558 // array itself). "descriptor" looks like "[C" or "[[[[B" or
4559 // "[Ljava/lang/String;".
4560 //
4561 // If "descriptor" refers to an array of primitives, look up the
4562 // primitive type's internally-generated class object.
4563 //
4564 // "class_loader" is the class loader of the class that's referring to
4565 // us. It's used to ensure that we're looking for the element type in
4566 // the right context. It does NOT become the class loader for the
4567 // array class; that always comes from the base element class.
4568 //
4569 // Returns null with an exception raised on failure.
4570 ObjPtr<mirror::Class> ClassLinker::CreateArrayClass(Thread* self,
4571 const char* descriptor,
4572 size_t hash,
4573 Handle<mirror::ClassLoader> class_loader) {
4574 // Identify the underlying component type
4575 CHECK_EQ('[', descriptor[0]);
4576 StackHandleScope<2> hs(self);
4577
4578 // This check prevents calls to ClassLoad and ClassPrepare, which can cause java/user-supplied
4579 // code to be executed. We do it up front so we can avoid all the allocations associated with
4580 // creating the class. This can happen with (e.g.) JIT threads.
4581 if (!self->CanLoadClasses()) {
4582 // Make sure we don't try to load anything, potentially causing an infinite loop.
4583 ObjPtr<mirror::Throwable> pre_allocated =
4584 Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
4585 self->SetException(pre_allocated);
4586 return nullptr;
4587 }
4588
4589 MutableHandle<mirror::Class> component_type(hs.NewHandle(FindClass(self, descriptor + 1,
4590 class_loader)));
4591 if (component_type == nullptr) {
4592 DCHECK(self->IsExceptionPending());
4593 // We need to accept erroneous classes as component types. Under AOT, we
4594 // don't accept them as we cannot encode the erroneous class in an image.
4595 const size_t component_hash = ComputeModifiedUtf8Hash(descriptor + 1);
4596 component_type.Assign(LookupClass(self, descriptor + 1, component_hash, class_loader.Get()));
4597 if (component_type == nullptr || Runtime::Current()->IsAotCompiler()) {
4598 DCHECK(self->IsExceptionPending());
4599 return nullptr;
4600 } else {
4601 self->ClearException();
4602 }
4603 }
4604 if (UNLIKELY(component_type->IsPrimitiveVoid())) {
4605 ThrowNoClassDefFoundError("Attempt to create array of void primitive type");
4606 return nullptr;
4607 }
4608 // See if the component type is already loaded. Array classes are
4609 // always associated with the class loader of their underlying
4610 // element type -- an array of Strings goes with the loader for
4611 // java/lang/String -- so we need to look for it there. (The
4612 // caller should have checked for the existence of the class
4613 // before calling here, but they did so with *their* class loader,
4614 // not the component type's loader.)
4615 //
4616 // If we find it, the caller adds "loader" to the class' initiating
4617 // loader list, which should prevent us from going through this again.
4618 //
4619 // This call is unnecessary if "loader" and "component_type->GetClassLoader()"
4620 // are the same, because our caller (FindClass) just did the
4621 // lookup. (Even if we get this wrong we still have correct behavior,
4622 // because we effectively do this lookup again when we add the new
4623 // class to the hash table --- necessary because of possible races with
4624 // other threads.)
4625 if (class_loader.Get() != component_type->GetClassLoader()) {
4626 ObjPtr<mirror::Class> new_class =
4627 LookupClass(self, descriptor, hash, component_type->GetClassLoader());
4628 if (new_class != nullptr) {
4629 return new_class;
4630 }
4631 }
4632 // Core array classes, i.e. Object[], Class[], String[] and primitive
4633 // arrays, have special initialization and they should be found above.
4634 DCHECK_IMPLIES(component_type->IsObjectClass(),
4635 // Guard from false positives for errors before setting superclass.
4636 component_type->IsErroneousUnresolved());
4637 DCHECK(!component_type->IsStringClass());
4638 DCHECK(!component_type->IsClassClass());
4639 DCHECK(!component_type->IsPrimitive());
4640
4641 // Fill out the fields in the Class.
4642 //
4643 // It is possible to execute some methods against arrays, because
4644 // all arrays are subclasses of java_lang_Object_, so we need to set
4645 // up a vtable. We can just point at the one in java_lang_Object_.
4646 //
4647 // Array classes are simple enough that we don't need to do a full
4648 // link step.
4649 size_t array_class_size = mirror::Array::ClassSize(image_pointer_size_);
4650 auto visitor = [this, array_class_size, component_type](ObjPtr<mirror::Object> obj,
4651 size_t usable_size)
4652 REQUIRES_SHARED(Locks::mutator_lock_) {
4653 ScopedAssertNoNewTransactionRecords sanntr("CreateArrayClass");
4654 mirror::Class::InitializeClassVisitor init_class(array_class_size);
4655 init_class(obj, usable_size);
4656 ObjPtr<mirror::Class> klass = ObjPtr<mirror::Class>::DownCast(obj);
4657 klass->SetComponentType(component_type.Get());
4658 // Do not hold lock for initialization, the fence issued after the visitor
4659 // returns ensures memory visibility together with the implicit consume
4660 // semantics (for all supported architectures) for any thread that loads
4661 // the array class reference from any memory locations afterwards.
4662 FinishArrayClassSetup(klass);
4663 };
4664 auto new_class = hs.NewHandle<mirror::Class>(
4665 AllocClass(self, GetClassRoot<mirror::Class>(this), array_class_size, visitor));
4666 if (new_class == nullptr) {
4667 self->AssertPendingOOMException();
4668 return nullptr;
4669 }
4670
4671 ObjPtr<mirror::Class> existing = InsertClass(descriptor, new_class.Get(), hash);
4672 if (existing == nullptr) {
4673 // We postpone ClassLoad and ClassPrepare events to this point in time to avoid
4674 // duplicate events in case of races. Array classes don't really follow dedicated
4675 // load and prepare, anyways.
4676 Runtime::Current()->GetRuntimeCallbacks()->ClassLoad(new_class);
4677 Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(new_class, new_class);
4678
4679 jit::Jit::NewTypeLoadedIfUsingJit(new_class.Get());
4680 return new_class.Get();
4681 }
4682 // Another thread must have loaded the class after we
4683 // started but before we finished. Abandon what we've
4684 // done.
4685 //
4686 // (Yes, this happens.)
4687
4688 return existing;
4689 }
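// Worked example (illustrative): creating the class for "[[Ljava/lang/String;"
// first resolves the component "[Ljava/lang/String;" (itself created the same
// way from "Ljava/lang/String;"), then allocates the array class, sets its
// component type, and publishes it via InsertClass(). The resulting class is
// associated with java.lang.String's defining class loader, not the caller's.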
4690
4691 ObjPtr<mirror::Class> ClassLinker::LookupPrimitiveClass(char type) {
4692 ClassRoot class_root;
4693 switch (type) {
4694 case 'B': class_root = ClassRoot::kPrimitiveByte; break;
4695 case 'C': class_root = ClassRoot::kPrimitiveChar; break;
4696 case 'D': class_root = ClassRoot::kPrimitiveDouble; break;
4697 case 'F': class_root = ClassRoot::kPrimitiveFloat; break;
4698 case 'I': class_root = ClassRoot::kPrimitiveInt; break;
4699 case 'J': class_root = ClassRoot::kPrimitiveLong; break;
4700 case 'S': class_root = ClassRoot::kPrimitiveShort; break;
4701 case 'Z': class_root = ClassRoot::kPrimitiveBoolean; break;
4702 case 'V': class_root = ClassRoot::kPrimitiveVoid; break;
4703 default:
4704 return nullptr;
4705 }
4706 return GetClassRoot(class_root, this);
4707 }
4708
4709 ObjPtr<mirror::Class> ClassLinker::FindPrimitiveClass(char type) {
4710 ObjPtr<mirror::Class> result = LookupPrimitiveClass(type);
4711 if (UNLIKELY(result == nullptr)) {
4712 std::string printable_type(PrintableChar(type));
4713 ThrowNoClassDefFoundError("Not a primitive type: %s", printable_type.c_str());
4714 }
4715 return result;
4716 }
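// Usage sketch (illustrative; `linker` is a hypothetical ClassLinker*):
//   ObjPtr<mirror::Class> int_class = linker->FindPrimitiveClass('I');  // Int class root.
//   ObjPtr<mirror::Class> bad = linker->FindPrimitiveClass('Q');        // Null + exception.
// The failing call raises NoClassDefFoundError, so callers must check for a
// pending exception whenever the result is null.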
4717
4718 ObjPtr<mirror::Class> ClassLinker::InsertClass(const char* descriptor,
4719 ObjPtr<mirror::Class> klass,
4720 size_t hash) {
4721 DCHECK(Thread::Current()->CanLoadClasses());
4722 if (VLOG_IS_ON(class_linker)) {
4723 ObjPtr<mirror::DexCache> dex_cache = klass->GetDexCache();
4724 std::string source;
4725 if (dex_cache != nullptr) {
4726 source += " from ";
4727 source += dex_cache->GetLocation()->ToModifiedUtf8();
4728 }
4729 LOG(INFO) << "Loaded class " << descriptor << source;
4730 }
4731 {
4732 WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
4733 const ObjPtr<mirror::ClassLoader> class_loader = klass->GetClassLoader();
4734 ClassTable* const class_table = InsertClassTableForClassLoader(class_loader);
4735 ObjPtr<mirror::Class> existing = class_table->Lookup(descriptor, hash);
4736 if (existing != nullptr) {
4737 return existing;
4738 }
4739 VerifyObject(klass);
4740 class_table->InsertWithHash(klass, hash);
4741 WriteBarrierOnClassLoaderLocked(class_loader, klass);
4742 }
4743 if (kIsDebugBuild) {
4744 // Test that copied methods correctly can find their holder.
4745 for (ArtMethod& method : klass->GetCopiedMethods(image_pointer_size_)) {
4746 CHECK_EQ(GetHoldingClassOfCopiedMethod(&method), klass);
4747 }
4748 }
4749 return nullptr;
4750 }
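// Contract note with example (illustrative): InsertClass() returns null on
// success and the previously registered class when it loses a publication race:
//   ObjPtr<mirror::Class> existing = InsertClass(descriptor, klass, hash);
//   if (existing != nullptr) { /* Another thread won; use `existing`. */ }
// CreateArrayClass() above relies on exactly this inverted-looking contract.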
4751
4752 void ClassLinker::WriteBarrierForBootOatFileBssRoots(const OatFile* oat_file) {
4753 WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
4754 DCHECK(!oat_file->GetBssGcRoots().empty()) << oat_file->GetLocation();
4755 if (log_new_roots_ && !ContainsElement(new_bss_roots_boot_oat_files_, oat_file)) {
4756 new_bss_roots_boot_oat_files_.push_back(oat_file);
4757 }
4758 }
4759
4760 // TODO This should really be in mirror::Class.
4761 void ClassLinker::UpdateClassMethods(ObjPtr<mirror::Class> klass,
4762 LengthPrefixedArray<ArtMethod>* new_methods) {
4763 klass->SetMethodsPtrUnchecked(new_methods,
4764 klass->NumDirectMethods(),
4765 klass->NumDeclaredVirtualMethods());
4766 // Need to mark the card so that the remembered sets and mod union tables get updated.
4767 WriteBarrier::ForEveryFieldWrite(klass);
4768 }
4769
4770 ObjPtr<mirror::Class> ClassLinker::LookupClass(Thread* self,
4771 const char* descriptor,
4772 ObjPtr<mirror::ClassLoader> class_loader) {
4773 return LookupClass(self, descriptor, ComputeModifiedUtf8Hash(descriptor), class_loader);
4774 }
4775
4776 ObjPtr<mirror::Class> ClassLinker::LookupClass(Thread* self,
4777 const char* descriptor,
4778 size_t hash,
4779 ObjPtr<mirror::ClassLoader> class_loader) {
4780 ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
4781 ClassTable* const class_table = ClassTableForClassLoader(class_loader);
4782 if (class_table != nullptr) {
4783 ObjPtr<mirror::Class> result = class_table->Lookup(descriptor, hash);
4784 if (result != nullptr) {
4785 return result;
4786 }
4787 }
4788 return nullptr;
4789 }
4790
4791 class MoveClassTableToPreZygoteVisitor : public ClassLoaderVisitor {
4792 public:
4793 MoveClassTableToPreZygoteVisitor() {}
4794
4795 void Visit(ObjPtr<mirror::ClassLoader> class_loader)
4796 REQUIRES(Locks::classlinker_classes_lock_)
4797 REQUIRES_SHARED(Locks::mutator_lock_) override {
4798 ClassTable* const class_table = class_loader->GetClassTable();
4799 if (class_table != nullptr) {
4800 class_table->FreezeSnapshot();
4801 }
4802 }
4803 };
4804
4805 void ClassLinker::MoveClassTableToPreZygote() {
4806 WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
4807 boot_class_table_->FreezeSnapshot();
4808 MoveClassTableToPreZygoteVisitor visitor;
4809 VisitClassLoaders(&visitor);
4810 }
4811
4812 // Look up classes by hash and descriptor and put all matching ones in the result array.
4813 class LookupClassesVisitor : public ClassLoaderVisitor {
4814 public:
4815 LookupClassesVisitor(const char* descriptor,
4816 size_t hash,
4817 std::vector<ObjPtr<mirror::Class>>* result)
4818 : descriptor_(descriptor),
4819 hash_(hash),
4820 result_(result) {}
4821
4822 void Visit(ObjPtr<mirror::ClassLoader> class_loader)
4823 REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
4824 ClassTable* const class_table = class_loader->GetClassTable();
4825 ObjPtr<mirror::Class> klass = class_table->Lookup(descriptor_, hash_);
4826 // Add `klass` only if `class_loader` is its defining (not just initiating) class loader.
4827 if (klass != nullptr && klass->GetClassLoader() == class_loader) {
4828 result_->push_back(klass);
4829 }
4830 }
4831
4832 private:
4833 const char* const descriptor_;
4834 const size_t hash_;
4835 std::vector<ObjPtr<mirror::Class>>* const result_;
4836 };
4837
4838 void ClassLinker::LookupClasses(const char* descriptor,
4839 std::vector<ObjPtr<mirror::Class>>& result) {
4840 result.clear();
4841 Thread* const self = Thread::Current();
4842 ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
4843 const size_t hash = ComputeModifiedUtf8Hash(descriptor);
4844 ObjPtr<mirror::Class> klass = boot_class_table_->Lookup(descriptor, hash);
4845 if (klass != nullptr) {
4846 DCHECK(klass->GetClassLoader() == nullptr);
4847 result.push_back(klass);
4848 }
4849 LookupClassesVisitor visitor(descriptor, hash, &result);
4850 VisitClassLoaders(&visitor);
4851 }
4852
AttemptSupertypeVerification(Thread * self,verifier::VerifierDeps * verifier_deps,Handle<mirror::Class> klass,Handle<mirror::Class> supertype)4853 bool ClassLinker::AttemptSupertypeVerification(Thread* self,
4854 verifier::VerifierDeps* verifier_deps,
4855 Handle<mirror::Class> klass,
4856 Handle<mirror::Class> supertype) {
4857 DCHECK(self != nullptr);
4858 DCHECK(klass != nullptr);
4859 DCHECK(supertype != nullptr);
4860
4861 if (!supertype->IsVerified() && !supertype->IsErroneous()) {
4862 VerifyClass(self, verifier_deps, supertype);
4863 }
4864
4865 if (supertype->IsVerified()
4866 || supertype->ShouldVerifyAtRuntime()
4867 || supertype->IsVerifiedNeedsAccessChecks()) {
4868 // The supertype is either verified, or we soft failed at AOT time.
4869 DCHECK(supertype->IsVerified() || Runtime::Current()->IsAotCompiler());
4870 return true;
4871 }
4872 // If we got this far then we have a hard failure.
4873 std::string error_msg =
4874 StringPrintf("Rejecting class %s that attempts to sub-type erroneous class %s",
4875 klass->PrettyDescriptor().c_str(),
4876 supertype->PrettyDescriptor().c_str());
4877 LOG(WARNING) << error_msg << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8();
4878 StackHandleScope<1> hs(self);
4879 Handle<mirror::Throwable> cause(hs.NewHandle(self->GetException()));
4880 if (cause != nullptr) {
4881 // Set during VerifyClass call (if at all).
4882 self->ClearException();
4883 }
4884 // Change into a verify error.
4885 ThrowVerifyError(klass.Get(), "%s", error_msg.c_str());
4886 if (cause != nullptr) {
4887 self->GetException()->SetCause(cause.Get());
4888 }
4889 ClassReference ref(klass->GetDexCache()->GetDexFile(), klass->GetDexClassDefIndex());
4890 if (Runtime::Current()->IsAotCompiler()) {
4891 Runtime::Current()->GetCompilerCallbacks()->ClassRejected(ref);
4892 }
4893 // Need to grab the lock to change status.
4894 ObjectLock<mirror::Class> super_lock(self, klass);
4895 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
4896 return false;
4897 }
4898
VerifyClass(Thread * self,verifier::VerifierDeps * verifier_deps,Handle<mirror::Class> klass,verifier::HardFailLogMode log_level)4899 verifier::FailureKind ClassLinker::VerifyClass(Thread* self,
4900 verifier::VerifierDeps* verifier_deps,
4901 Handle<mirror::Class> klass,
4902 verifier::HardFailLogMode log_level) {
4903 {
4904 // TODO: assert that the monitor on the Class is held
4905 ObjectLock<mirror::Class> lock(self, klass);
4906
4907 // Is somebody verifying this now?
4908 ClassStatus old_status = klass->GetStatus();
4909 while (old_status == ClassStatus::kVerifying) {
4910 lock.WaitIgnoringInterrupts();
4911 // WaitIgnoringInterrupts can still receive an interrupt and return early, in this
4912 // case we may see the same status again. b/62912904. This is why the check is
4913 // greater or equal.
4914 CHECK(klass->IsErroneous() || (klass->GetStatus() >= old_status))
4915 << "Class '" << klass->PrettyClass()
4916 << "' performed an illegal verification state transition from " << old_status
4917 << " to " << klass->GetStatus();
4918 old_status = klass->GetStatus();
4919 }
4920
4921 // The class might already be erroneous, for example at compile time if we attempted to verify
4922 // this class as a parent to another.
4923 if (klass->IsErroneous()) {
4924 ThrowEarlierClassFailure(klass.Get());
4925 return verifier::FailureKind::kHardFailure;
4926 }
4927
4928 // Don't attempt to re-verify if already verified.
4929 if (klass->IsVerified()) {
4930 if (verifier_deps != nullptr &&
4931 verifier_deps->ContainsDexFile(klass->GetDexFile()) &&
4932 !verifier_deps->HasRecordedVerifiedStatus(klass->GetDexFile(), *klass->GetClassDef()) &&
4933 !Runtime::Current()->IsAotCompiler()) {
4934 // If the klass is verified, but `verifier_deps` did not record it, this
4935 // means we are running background verification of a secondary dex file.
4936 // Re-run the verifier to populate `verifier_deps`.
4937 // No need to run the verification when running on the AOT Compiler, as
4938 // the driver handles those multithreaded cases already.
4939 std::string error_msg;
4940 verifier::FailureKind failure =
4941 PerformClassVerification(self, verifier_deps, klass, log_level, &error_msg);
4942 // We could have soft failures, so just check that we don't have a hard
4943 // failure.
4944 DCHECK_NE(failure, verifier::FailureKind::kHardFailure) << error_msg;
4945 }
4946 return verifier::FailureKind::kNoFailure;
4947 }
4948
4949 if (klass->IsVerifiedNeedsAccessChecks()) {
4950 if (!Runtime::Current()->IsAotCompiler()) {
4951 // Mark the class as having a verification attempt to avoid re-running
4952 // the verifier.
4953 mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
4954 }
4955 return verifier::FailureKind::kAccessChecksFailure;
4956 }
4957
4958 // For AOT, don't attempt to re-verify if we have already found we should
4959 // verify at runtime.
4960 if (klass->ShouldVerifyAtRuntime()) {
4961 CHECK(Runtime::Current()->IsAotCompiler());
4962 return verifier::FailureKind::kSoftFailure;
4963 }
4964
4965 DCHECK_EQ(klass->GetStatus(), ClassStatus::kResolved);
4966 mirror::Class::SetStatus(klass, ClassStatus::kVerifying, self);
4967
4968 // Skip verification if disabled.
4969 if (!Runtime::Current()->IsVerificationEnabled()) {
4970 mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
4971 UpdateClassAfterVerification(klass, image_pointer_size_, verifier::FailureKind::kNoFailure);
4972 return verifier::FailureKind::kNoFailure;
4973 }
4974 }
4975
4976 VLOG(class_linker) << "Beginning verification for class: "
4977 << klass->PrettyDescriptor()
4978 << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8();
4979
4980 // Verify super class.
4981 StackHandleScope<2> hs(self);
4982 MutableHandle<mirror::Class> supertype(hs.NewHandle(klass->GetSuperClass()));
4983 // If we have a superclass and we get a hard verification failure we can return immediately.
4984 if (supertype != nullptr &&
4985 !AttemptSupertypeVerification(self, verifier_deps, klass, supertype)) {
4986 CHECK(self->IsExceptionPending()) << "Verification error should be pending.";
4987 return verifier::FailureKind::kHardFailure;
4988 }
4989
4990 // Verify all default super-interfaces.
4991 //
4992 // (1) Don't bother if the superclass has already had a soft verification failure.
4993 //
4994 // (2) Interfaces shouldn't bother to do this recursive verification because they cannot cause
4995 // recursive initialization by themselves. This is because when an interface is initialized
4996 // directly it must not initialize its superinterfaces. We are allowed to verify regardless
4997 // but choose not to for an optimization. If the interfaces is being verified due to a class
4998 // initialization (which would need all the default interfaces to be verified) the class code
4999 // will trigger the recursive verification anyway.
5000 if ((supertype == nullptr || supertype->IsVerified()) // See (1)
5001 && !klass->IsInterface()) { // See (2)
5002 int32_t iftable_count = klass->GetIfTableCount();
5003 MutableHandle<mirror::Class> iface(hs.NewHandle<mirror::Class>(nullptr));
5004 // Loop through all interfaces this class has defined. It doesn't matter the order.
5005 for (int32_t i = 0; i < iftable_count; i++) {
5006 iface.Assign(klass->GetIfTable()->GetInterface(i));
5007 DCHECK(iface != nullptr);
5008 // We only care if we have default interfaces and can skip if we are already verified...
5009 if (LIKELY(!iface->HasDefaultMethods() || iface->IsVerified())) {
5010 continue;
5011 } else if (UNLIKELY(!AttemptSupertypeVerification(self, verifier_deps, klass, iface))) {
5012 // We had a hard failure while verifying this interface. Just return immediately.
5013 CHECK(self->IsExceptionPending()) << "Verification error should be pending.";
5014 return verifier::FailureKind::kHardFailure;
5015 } else if (UNLIKELY(!iface->IsVerified())) {
5016 // We softly failed to verify the iface. Stop checking and clean up.
5017 // Put the iface into the supertype handle so we know what caused us to fail.
5018 supertype.Assign(iface.Get());
5019 break;
5020 }
5021 }
5022 }
5023
5024 // At this point if verification failed, then supertype is the "first" supertype that failed
5025 // verification (without a specific order). If verification succeeded, then supertype is either
5026 // null or the original superclass of klass and is verified.
5027 DCHECK(supertype == nullptr ||
5028 supertype.Get() == klass->GetSuperClass() ||
5029 !supertype->IsVerified());
5030
5031 // Try to use verification information from the oat file, otherwise do runtime verification.
5032 const DexFile& dex_file = *klass->GetDexCache()->GetDexFile();
5033 ClassStatus oat_file_class_status(ClassStatus::kNotReady);
5034 bool preverified = VerifyClassUsingOatFile(self, dex_file, klass, oat_file_class_status);
5035
5036 VLOG(class_linker) << "Class preverified status for class "
5037 << klass->PrettyDescriptor()
5038 << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8()
5039 << ": "
5040 << preverified
5041 << "( " << oat_file_class_status << ")";
5042
5043 // If the oat file says the class had an error, re-run the verifier. That way we will either:
5044 // 1) Be successful at runtime, or
5045 // 2) Get a precise error message.
5046 DCHECK_IMPLIES(mirror::Class::IsErroneous(oat_file_class_status), !preverified);
5047
5048 std::string error_msg;
5049 verifier::FailureKind verifier_failure = verifier::FailureKind::kNoFailure;
5050 if (!preverified) {
5051 verifier_failure = PerformClassVerification(self, verifier_deps, klass, log_level, &error_msg);
5052 } else if (oat_file_class_status == ClassStatus::kVerifiedNeedsAccessChecks) {
5053 verifier_failure = verifier::FailureKind::kAccessChecksFailure;
5054 }
5055
5056 // Verification is done, grab the lock again.
5057 ObjectLock<mirror::Class> lock(self, klass);
5058 self->AssertNoPendingException();
5059
5060 if (verifier_failure == verifier::FailureKind::kHardFailure) {
5061 VLOG(verifier) << "Verification failed on class " << klass->PrettyDescriptor()
5062 << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8()
5063 << " because: " << error_msg;
5064 ThrowVerifyError(klass.Get(), "%s", error_msg.c_str());
5065 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
5066 return verifier_failure;
5067 }
5068
5069 // Make sure all classes referenced by catch blocks are resolved.
5070 ResolveClassExceptionHandlerTypes(klass);
5071
5072 if (Runtime::Current()->IsAotCompiler()) {
5073 if (supertype != nullptr && supertype->ShouldVerifyAtRuntime()) {
5074 // Regardless of our own verification result, we need to verify the class
5075 // at runtime if the super class is not verified. This is required in case
5076 // we generate an app/boot image.
5077 mirror::Class::SetStatus(klass, ClassStatus::kRetryVerificationAtRuntime, self);
5078 } else if (verifier_failure == verifier::FailureKind::kNoFailure) {
5079 mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
5080 } else if (verifier_failure == verifier::FailureKind::kSoftFailure ||
5081 verifier_failure == verifier::FailureKind::kTypeChecksFailure) {
5082 mirror::Class::SetStatus(klass, ClassStatus::kRetryVerificationAtRuntime, self);
5083 } else {
5084 mirror::Class::SetStatus(klass, ClassStatus::kVerifiedNeedsAccessChecks, self);
5085 }
5086 // Notify the compiler about the verification status, in case the class
5087 // was verified implicitly (eg super class of a compiled class). When the
5088 // compiler unloads dex file after compilation, we still want to keep
5089 // verification states.
5090 Runtime::Current()->GetCompilerCallbacks()->UpdateClassState(
5091 ClassReference(&klass->GetDexFile(), klass->GetDexClassDefIndex()), klass->GetStatus());
5092 } else {
5093 mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
5094 }
5095
5096 UpdateClassAfterVerification(klass, image_pointer_size_, verifier_failure);
5097 return verifier_failure;
5098 }
5099
PerformClassVerification(Thread * self,verifier::VerifierDeps * verifier_deps,Handle<mirror::Class> klass,verifier::HardFailLogMode log_level,std::string * error_msg)5100 verifier::FailureKind ClassLinker::PerformClassVerification(Thread* self,
5101 verifier::VerifierDeps* verifier_deps,
5102 Handle<mirror::Class> klass,
5103 verifier::HardFailLogMode log_level,
5104 std::string* error_msg) {
5105 Runtime* const runtime = Runtime::Current();
5106 StackHandleScope<2> hs(self);
5107 Handle<mirror::DexCache> dex_cache(hs.NewHandle(klass->GetDexCache()));
5108 Handle<mirror::ClassLoader> class_loader(hs.NewHandle(klass->GetClassLoader()));
5109 return verifier::ClassVerifier::VerifyClass(self,
5110 verifier_deps,
5111 dex_cache->GetDexFile(),
5112 klass,
5113 dex_cache,
5114 class_loader,
5115 *klass->GetClassDef(),
5116 runtime->GetCompilerCallbacks(),
5117 log_level,
5118 Runtime::Current()->GetTargetSdkVersion(),
5119 error_msg);
5120 }
5121
VerifyClassUsingOatFile(Thread * self,const DexFile & dex_file,Handle<mirror::Class> klass,ClassStatus & oat_file_class_status)5122 bool ClassLinker::VerifyClassUsingOatFile(Thread* self,
5123 const DexFile& dex_file,
5124 Handle<mirror::Class> klass,
5125 ClassStatus& oat_file_class_status) {
5126 // If we're compiling, we can only verify the class using the oat file if
5127 // we are not compiling the image or if the class we're verifying is not part of
5128 // the compilation unit (app - dependencies). We will let the compiler callback
5129 // tell us about the latter.
5130 if (Runtime::Current()->IsAotCompiler()) {
5131 CompilerCallbacks* callbacks = Runtime::Current()->GetCompilerCallbacks();
5132 // We are compiling an app (not the image).
5133 if (!callbacks->CanUseOatStatusForVerification(klass.Get())) {
5134 return false;
5135 }
5136 }
5137
5138 const OatDexFile* oat_dex_file = dex_file.GetOatDexFile();
5139 // In case we run without an image there won't be a backing oat file.
5140 if (oat_dex_file == nullptr || oat_dex_file->GetOatFile() == nullptr) {
5141 return false;
5142 }
5143
5144 uint16_t class_def_index = klass->GetDexClassDefIndex();
5145 oat_file_class_status = oat_dex_file->GetOatClass(class_def_index).GetStatus();
5146 if (oat_file_class_status >= ClassStatus::kVerified) {
5147 return true;
5148 }
5149 if (oat_file_class_status >= ClassStatus::kVerifiedNeedsAccessChecks) {
5150 // We return that the clas has already been verified, and the caller should
5151 // check the class status to ensure we run with access checks.
5152 return true;
5153 }
5154
5155 // Check the class status with the vdex file.
5156 const OatFile* oat_file = oat_dex_file->GetOatFile();
5157 if (oat_file != nullptr) {
5158 ClassStatus vdex_status = oat_file->GetVdexFile()->ComputeClassStatus(self, klass);
5159 if (vdex_status >= ClassStatus::kVerifiedNeedsAccessChecks) {
5160 VLOG(verifier) << "Vdex verification success for " << klass->PrettyClass();
5161 oat_file_class_status = vdex_status;
5162 return true;
5163 }
5164 }
5165
5166 // If we only verified a subset of the classes at compile time, we can end up with classes that
5167 // were resolved by the verifier.
5168 if (oat_file_class_status == ClassStatus::kResolved) {
5169 return false;
5170 }
5171 // We never expect a .oat file to have kRetryVerificationAtRuntime statuses.
5172 CHECK_NE(oat_file_class_status, ClassStatus::kRetryVerificationAtRuntime)
5173 << klass->PrettyClass() << " " << dex_file.GetLocation();
5174
5175 if (mirror::Class::IsErroneous(oat_file_class_status)) {
5176 // Compile time verification failed with a hard error. We'll re-run
5177 // verification, which might be successful at runtime.
5178 return false;
5179 }
5180 if (oat_file_class_status == ClassStatus::kNotReady) {
5181 // Status is uninitialized if we couldn't determine the status at compile time, for example,
5182 // not loading the class.
5183 // TODO: when the verifier doesn't rely on Class-es failing to resolve/load the type hierarchy
5184 // isn't a problem and this case shouldn't occur
5185 return false;
5186 }
5187 std::string temp;
5188 LOG(FATAL) << "Unexpected class status: " << oat_file_class_status
5189 << " " << dex_file.GetLocation() << " " << klass->PrettyClass() << " "
5190 << klass->GetDescriptor(&temp);
5191 UNREACHABLE();
5192 }
5193
ResolveClassExceptionHandlerTypes(Handle<mirror::Class> klass)5194 void ClassLinker::ResolveClassExceptionHandlerTypes(Handle<mirror::Class> klass) {
5195 for (ArtMethod& method : klass->GetMethods(image_pointer_size_)) {
5196 ResolveMethodExceptionHandlerTypes(&method);
5197 }
5198 }
5199
ResolveMethodExceptionHandlerTypes(ArtMethod * method)5200 void ClassLinker::ResolveMethodExceptionHandlerTypes(ArtMethod* method) {
5201 // similar to DexVerifier::ScanTryCatchBlocks and dex2oat's ResolveExceptionsForMethod.
5202 CodeItemDataAccessor accessor(method->DexInstructionData());
5203 if (!accessor.HasCodeItem()) {
5204 return; // native or abstract method
5205 }
5206 if (accessor.TriesSize() == 0) {
5207 return; // nothing to process
5208 }
5209 const uint8_t* handlers_ptr = accessor.GetCatchHandlerData(0);
5210 CHECK(method->GetDexFile()->IsInDataSection(handlers_ptr))
5211 << method->PrettyMethod()
5212 << "@" << method->GetDexFile()->GetLocation()
5213 << "@" << reinterpret_cast<const void*>(handlers_ptr)
5214 << " is_compact_dex=" << method->GetDexFile()->IsCompactDexFile();
5215
5216 uint32_t handlers_size = DecodeUnsignedLeb128(&handlers_ptr);
5217 for (uint32_t idx = 0; idx < handlers_size; idx++) {
5218 CatchHandlerIterator iterator(handlers_ptr);
5219 for (; iterator.HasNext(); iterator.Next()) {
5220 // Ensure exception types are resolved so that they don't need resolution to be delivered,
5221 // unresolved exception types will be ignored by exception delivery
5222 if (iterator.GetHandlerTypeIndex().IsValid()) {
5223 ObjPtr<mirror::Class> exception_type = ResolveType(iterator.GetHandlerTypeIndex(), method);
5224 if (exception_type == nullptr) {
5225 DCHECK(Thread::Current()->IsExceptionPending());
5226 Thread::Current()->ClearException();
5227 }
5228 }
5229 }
5230 handlers_ptr = iterator.EndDataPointer();
5231 }
5232 }
5233
CreateProxyClass(ScopedObjectAccessAlreadyRunnable & soa,jstring name,jobjectArray interfaces,jobject loader,jobjectArray methods,jobjectArray throws)5234 ObjPtr<mirror::Class> ClassLinker::CreateProxyClass(ScopedObjectAccessAlreadyRunnable& soa,
5235 jstring name,
5236 jobjectArray interfaces,
5237 jobject loader,
5238 jobjectArray methods,
5239 jobjectArray throws) {
5240 Thread* self = soa.Self();
5241
5242 // This is to prevent the calls to ClassLoad and ClassPrepare which can cause java/user-supplied
5243 // code to be executed. We put it up here so we can avoid all the allocations associated with
5244 // creating the class. This can happen with (eg) jit-threads.
5245 if (!self->CanLoadClasses()) {
5246 // Make sure we don't try to load anything, potentially causing an infinite loop.
5247 ObjPtr<mirror::Throwable> pre_allocated =
5248 Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
5249 self->SetException(pre_allocated);
5250 return nullptr;
5251 }
5252
5253 StackHandleScope<12> hs(self);
5254 MutableHandle<mirror::Class> temp_klass(hs.NewHandle(
5255 AllocClass(self, GetClassRoot<mirror::Class>(this), sizeof(mirror::Class))));
5256 if (temp_klass == nullptr) {
5257 CHECK(self->IsExceptionPending()); // OOME.
5258 return nullptr;
5259 }
5260 DCHECK(temp_klass->GetClass() != nullptr);
5261 temp_klass->SetObjectSize(sizeof(mirror::Proxy));
5262 // Set the class access flags incl. VerificationAttempted, so we do not try to set the flag on
5263 // the methods.
5264 temp_klass->SetAccessFlagsDuringLinking(kAccClassIsProxy | kAccPublic | kAccFinal);
5265 temp_klass->SetClassLoader(soa.Decode<mirror::ClassLoader>(loader));
5266 DCHECK_EQ(temp_klass->GetPrimitiveType(), Primitive::kPrimNot);
5267 temp_klass->SetName(soa.Decode<mirror::String>(name));
5268 temp_klass->SetDexCache(GetClassRoot<mirror::Proxy>(this)->GetDexCache());
5269 // Object has an empty iftable, copy it for that reason.
5270 temp_klass->SetIfTable(GetClassRoot<mirror::Object>(this)->GetIfTable());
5271 mirror::Class::SetStatus(temp_klass, ClassStatus::kIdx, self);
5272 std::string storage;
5273 const char* descriptor = temp_klass->GetDescriptor(&storage);
5274 const size_t hash = ComputeModifiedUtf8Hash(descriptor);
5275
5276 // Needs to be before we insert the class so that the allocator field is set.
5277 LinearAlloc* const allocator = GetOrCreateAllocatorForClassLoader(temp_klass->GetClassLoader());
5278
5279 // Insert the class before loading the fields as the field roots
5280 // (ArtField::declaring_class_) are only visited from the class
5281 // table. There can't be any suspend points between inserting the
5282 // class and setting the field arrays below.
5283 ObjPtr<mirror::Class> existing = InsertClass(descriptor, temp_klass.Get(), hash);
5284 CHECK(existing == nullptr);
5285
5286 // Instance fields are inherited, but we add a couple of static fields...
5287 const size_t num_fields = 2;
5288 LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self, allocator, num_fields);
5289 temp_klass->SetSFieldsPtr(sfields);
5290
5291 // 1. Create a static field 'interfaces' that holds the _declared_ interfaces implemented by
5292 // our proxy, so Class.getInterfaces doesn't return the flattened set.
5293 ArtField& interfaces_sfield = sfields->At(0);
5294 interfaces_sfield.SetDexFieldIndex(0);
5295 interfaces_sfield.SetDeclaringClass(temp_klass.Get());
5296 interfaces_sfield.SetAccessFlags(kAccStatic | kAccPublic | kAccFinal);
5297
5298 // 2. Create a static field 'throws' that holds exceptions thrown by our methods.
5299 ArtField& throws_sfield = sfields->At(1);
5300 throws_sfield.SetDexFieldIndex(1);
5301 throws_sfield.SetDeclaringClass(temp_klass.Get());
5302 throws_sfield.SetAccessFlags(kAccStatic | kAccPublic | kAccFinal);
5303
5304 // Proxies have 1 direct method, the constructor
5305 const size_t num_direct_methods = 1;
5306
5307 // The array we get passed contains all methods, including private and static
5308 // ones that aren't proxied. We need to filter those out since only interface
5309 // methods (non-private & virtual) are actually proxied.
5310 Handle<mirror::ObjectArray<mirror::Method>> h_methods =
5311 hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::Method>>(methods));
5312 DCHECK_EQ(h_methods->GetClass(), GetClassRoot<mirror::ObjectArray<mirror::Method>>())
5313 << mirror::Class::PrettyClass(h_methods->GetClass());
5314 // List of the actual virtual methods this class will have.
5315 std::vector<ArtMethod*> proxied_methods;
5316 std::vector<size_t> proxied_throws_idx;
5317 proxied_methods.reserve(h_methods->GetLength());
5318 proxied_throws_idx.reserve(h_methods->GetLength());
5319 // Filter out to only the non-private virtual methods.
5320 for (auto [mirror, idx] : ZipCount(h_methods.Iterate<mirror::Method>())) {
5321 ArtMethod* m = mirror->GetArtMethod();
5322 if (!m->IsPrivate() && !m->IsStatic()) {
5323 proxied_methods.push_back(m);
5324 proxied_throws_idx.push_back(idx);
5325 }
5326 }
5327 const size_t num_virtual_methods = proxied_methods.size();
5328 // We also need to filter out the 'throws'. The 'throws' are a Class[][] that
5329 // contains an array of all the classes each function is declared to throw.
5330 // This is used to wrap unexpected exceptions in a
5331 // UndeclaredThrowableException exception. This array is in the same order as
5332 // the methods array and like the methods array must be filtered to remove any
5333 // non-proxied methods.
5334 const bool has_filtered_methods =
5335 static_cast<int32_t>(num_virtual_methods) != h_methods->GetLength();
5336 MutableHandle<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>> original_proxied_throws(
5337 hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>>(throws)));
5338 MutableHandle<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>> proxied_throws(
5339 hs.NewHandle<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>>(
5340 (has_filtered_methods)
5341 ? mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>::Alloc(
5342 self, original_proxied_throws->GetClass(), num_virtual_methods)
5343 : original_proxied_throws.Get()));
5344 if (proxied_throws.IsNull() && !original_proxied_throws.IsNull()) {
5345 self->AssertPendingOOMException();
5346 return nullptr;
5347 }
5348 if (has_filtered_methods) {
5349 for (auto [orig_idx, new_idx] : ZipCount(MakeIterationRange(proxied_throws_idx))) {
5350 DCHECK_LE(new_idx, orig_idx);
5351 proxied_throws->Set(new_idx, original_proxied_throws->Get(orig_idx));
5352 }
5353 }
5354
5355 // Create the methods array.
5356 LengthPrefixedArray<ArtMethod>* proxy_class_methods = AllocArtMethodArray(
5357 self, allocator, num_direct_methods + num_virtual_methods);
5358 // Currently AllocArtMethodArray cannot return null, but the OOM logic is left there in case we
5359 // want to throw OOM in the future.
5360 if (UNLIKELY(proxy_class_methods == nullptr)) {
5361 self->AssertPendingOOMException();
5362 return nullptr;
5363 }
5364 temp_klass->SetMethodsPtr(proxy_class_methods, num_direct_methods, num_virtual_methods);
5365
5366 // Create the single direct method.
5367 CreateProxyConstructor(temp_klass, temp_klass->GetDirectMethodUnchecked(0, image_pointer_size_));
5368
5369 // Create virtual method using specified prototypes.
5370 // TODO These should really use the iterators.
5371 for (size_t i = 0; i < num_virtual_methods; ++i) {
5372 auto* virtual_method = temp_klass->GetVirtualMethodUnchecked(i, image_pointer_size_);
5373 auto* prototype = proxied_methods[i];
5374 CreateProxyMethod(temp_klass, prototype, virtual_method);
5375 DCHECK(virtual_method->GetDeclaringClass() != nullptr);
5376 DCHECK(prototype->GetDeclaringClass() != nullptr);
5377 }
5378
5379 // The super class is java.lang.reflect.Proxy
5380 temp_klass->SetSuperClass(GetClassRoot<mirror::Proxy>(this));
5381 // Now effectively in the loaded state.
5382 mirror::Class::SetStatus(temp_klass, ClassStatus::kLoaded, self);
5383 self->AssertNoPendingException();
5384
5385 // At this point the class is loaded. Publish a ClassLoad event.
5386 // Note: this may be a temporary class. It is a listener's responsibility to handle this.
5387 Runtime::Current()->GetRuntimeCallbacks()->ClassLoad(temp_klass);
5388
5389 MutableHandle<mirror::Class> klass = hs.NewHandle<mirror::Class>(nullptr);
5390 {
5391 // Must hold lock on object when resolved.
5392 ObjectLock<mirror::Class> resolution_lock(self, temp_klass);
5393 // Link the fields and virtual methods, creating vtable and iftables.
5394 // The new class will replace the old one in the class table.
5395 Handle<mirror::ObjectArray<mirror::Class>> h_interfaces(
5396 hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::Class>>(interfaces)));
5397 if (!LinkClass(self, descriptor, temp_klass, h_interfaces, &klass)) {
5398 if (!temp_klass->IsErroneous()) {
5399 mirror::Class::SetStatus(temp_klass, ClassStatus::kErrorUnresolved, self);
5400 }
5401 return nullptr;
5402 }
5403 }
5404 CHECK(temp_klass->IsRetired());
5405 CHECK_NE(temp_klass.Get(), klass.Get());
5406
5407 CHECK_EQ(interfaces_sfield.GetDeclaringClass(), klass.Get());
5408 interfaces_sfield.SetObject<false>(
5409 klass.Get(),
5410 soa.Decode<mirror::ObjectArray<mirror::Class>>(interfaces));
5411 CHECK_EQ(throws_sfield.GetDeclaringClass(), klass.Get());
5412 throws_sfield.SetObject<false>(
5413 klass.Get(),
5414 proxied_throws.Get());
5415
5416 Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(temp_klass, klass);
5417
5418 // SubtypeCheckInfo::Initialized must happen-before any new-instance for that type.
5419 // See also ClassLinker::EnsureInitialized().
5420 if (kBitstringSubtypeCheckEnabled) {
5421 MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
5422 SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(klass.Get());
5423 // TODO: Avoid taking subtype_check_lock_ if SubtypeCheck for j.l.r.Proxy is already assigned.
5424 }
5425
5426 VisiblyInitializedCallback* callback = nullptr;
5427 {
5428 // Lock on klass is released. Lock new class object.
5429 ObjectLock<mirror::Class> initialization_lock(self, klass);
5430 // Conservatively go through the ClassStatus::kInitialized state.
5431 callback = MarkClassInitialized(self, klass);
5432 }
5433 if (callback != nullptr) {
5434 callback->MakeVisible(self);
5435 }
5436
5437 // Consistency checks.
5438 if (kIsDebugBuild) {
5439 CHECK(klass->GetIFieldsPtr() == nullptr);
5440 CheckProxyConstructor(klass->GetDirectMethod(0, image_pointer_size_));
5441
5442 for (size_t i = 0; i < num_virtual_methods; ++i) {
5443 auto* virtual_method = klass->GetVirtualMethodUnchecked(i, image_pointer_size_);
5444 CheckProxyMethod(virtual_method, proxied_methods[i]);
5445 }
5446
5447 StackHandleScope<1> hs2(self);
5448 Handle<mirror::String> decoded_name = hs2.NewHandle(soa.Decode<mirror::String>(name));
5449 std::string interfaces_field_name(StringPrintf("java.lang.Class[] %s.interfaces",
5450 decoded_name->ToModifiedUtf8().c_str()));
5451 CHECK_EQ(ArtField::PrettyField(klass->GetStaticField(0)), interfaces_field_name);
5452
5453 std::string throws_field_name(StringPrintf("java.lang.Class[][] %s.throws",
5454 decoded_name->ToModifiedUtf8().c_str()));
5455 CHECK_EQ(ArtField::PrettyField(klass->GetStaticField(1)), throws_field_name);
5456
5457 CHECK_EQ(klass.Get()->GetProxyInterfaces(),
5458 soa.Decode<mirror::ObjectArray<mirror::Class>>(interfaces));
5459 CHECK_EQ(klass.Get()->GetProxyThrows(),
5460 proxied_throws.Get());
5461 }
5462 return klass.Get();
5463 }
5464
CreateProxyConstructor(Handle<mirror::Class> klass,ArtMethod * out)5465 void ClassLinker::CreateProxyConstructor(Handle<mirror::Class> klass, ArtMethod* out) {
5466 // Create constructor for Proxy that must initialize the method.
5467 ObjPtr<mirror::Class> proxy_class = GetClassRoot<mirror::Proxy>(this);
5468 CHECK_EQ(proxy_class->NumDirectMethods(), 21u);
5469
5470 // Find the <init>(InvocationHandler)V method. The exact method offset varies depending
5471 // on which front-end compiler was used to build the libcore DEX files.
5472 ArtMethod* proxy_constructor = WellKnownClasses::java_lang_reflect_Proxy_init;
5473 DCHECK(proxy_constructor != nullptr)
5474 << "Could not find <init> method in java.lang.reflect.Proxy";
5475
5476 // Clone the existing constructor of Proxy (our constructor would just invoke it so steal its
5477 // code_ too)
5478 DCHECK(out != nullptr);
5479 out->CopyFrom(proxy_constructor, image_pointer_size_);
5480 // Make this constructor public and fix the class to be our Proxy version.
5481 // Mark kAccCompileDontBother so that we don't take JIT samples for the method. b/62349349
5482 // Note that the compiler calls a ResolveMethod() overload that does not handle a Proxy referrer.
5483 out->SetAccessFlags((out->GetAccessFlags() & ~kAccProtected) |
5484 kAccPublic |
5485 kAccCompileDontBother);
5486 out->SetDeclaringClass(klass.Get());
5487
5488 // Set the original constructor method.
5489 out->SetDataPtrSize(proxy_constructor, image_pointer_size_);
5490 }
5491
CheckProxyConstructor(ArtMethod * constructor) const5492 void ClassLinker::CheckProxyConstructor(ArtMethod* constructor) const {
5493 CHECK(constructor->IsConstructor());
5494 auto* np = constructor->GetInterfaceMethodIfProxy(image_pointer_size_);
5495 CHECK_STREQ(np->GetName(), "<init>");
5496 CHECK_STREQ(np->GetSignature().ToString().c_str(), "(Ljava/lang/reflect/InvocationHandler;)V");
5497 DCHECK(constructor->IsPublic());
5498 }
5499
CreateProxyMethod(Handle<mirror::Class> klass,ArtMethod * prototype,ArtMethod * out)5500 void ClassLinker::CreateProxyMethod(Handle<mirror::Class> klass, ArtMethod* prototype,
5501 ArtMethod* out) {
5502 // We steal everything from the prototype (such as DexCache, invoke stub, etc.) then specialize
5503 // as necessary
5504 DCHECK(out != nullptr);
5505 out->CopyFrom(prototype, image_pointer_size_);
5506
5507 // Set class to be the concrete proxy class.
5508 out->SetDeclaringClass(klass.Get());
5509 // Clear the abstract and default flags to ensure that defaults aren't picked in
5510 // preference to the invocation handler.
5511 const uint32_t kRemoveFlags = kAccAbstract | kAccDefault;
5512 // Make the method final.
5513 // Mark kAccCompileDontBother so that we don't take JIT samples for the method. b/62349349
5514 const uint32_t kAddFlags = kAccFinal | kAccCompileDontBother;
5515 out->SetAccessFlags((out->GetAccessFlags() & ~kRemoveFlags) | kAddFlags);
5516
5517 // Set the original interface method.
5518 out->SetDataPtrSize(prototype, image_pointer_size_);
5519
5520 // At runtime the method looks like a reference and argument saving method, clone the code
5521 // related parameters from this method.
5522 out->SetEntryPointFromQuickCompiledCode(GetQuickProxyInvokeHandler());
5523 }
5524
CheckProxyMethod(ArtMethod * method,ArtMethod * prototype) const5525 void ClassLinker::CheckProxyMethod(ArtMethod* method, ArtMethod* prototype) const {
5526 // Basic consistency checks.
5527 CHECK(!prototype->IsFinal());
5528 CHECK(method->IsFinal());
5529 CHECK(method->IsInvokable());
5530
5531 // The proxy method doesn't have its own dex cache or dex file and so it steals those of its
5532 // interface prototype. The exception to this are Constructors and the Class of the Proxy itself.
5533 CHECK_EQ(prototype->GetDexMethodIndex(), method->GetDexMethodIndex());
5534 CHECK_EQ(prototype, method->GetInterfaceMethodIfProxy(image_pointer_size_));
5535 }
5536
CanWeInitializeClass(ObjPtr<mirror::Class> klass,bool can_init_statics,bool can_init_parents)5537 bool ClassLinker::CanWeInitializeClass(ObjPtr<mirror::Class> klass,
5538 bool can_init_statics,
5539 bool can_init_parents) {
5540 if (can_init_statics && can_init_parents) {
5541 return true;
5542 }
5543 DCHECK(Runtime::Current()->IsAotCompiler());
5544
5545 // We currently don't support initializing at AOT time classes that need access
5546 // checks.
5547 if (klass->IsVerifiedNeedsAccessChecks()) {
5548 return false;
5549 }
5550 if (!can_init_statics) {
5551 // Check if there's a class initializer.
5552 ArtMethod* clinit = klass->FindClassInitializer(image_pointer_size_);
5553 if (clinit != nullptr) {
5554 return false;
5555 }
5556 // Check if there are encoded static values needing initialization.
5557 if (klass->NumStaticFields() != 0) {
5558 const dex::ClassDef* dex_class_def = klass->GetClassDef();
5559 DCHECK(dex_class_def != nullptr);
5560 if (dex_class_def->static_values_off_ != 0) {
5561 return false;
5562 }
5563 }
5564 }
5565 // If we are a class we need to initialize all interfaces with default methods when we are
5566 // initialized. Check all of them.
5567 if (!klass->IsInterface()) {
5568 size_t num_interfaces = klass->GetIfTableCount();
5569 for (size_t i = 0; i < num_interfaces; i++) {
5570 ObjPtr<mirror::Class> iface = klass->GetIfTable()->GetInterface(i);
5571 if (iface->HasDefaultMethods() && !iface->IsInitialized()) {
5572 if (!can_init_parents || !CanWeInitializeClass(iface, can_init_statics, can_init_parents)) {
5573 return false;
5574 }
5575 }
5576 }
5577 }
5578 if (klass->IsInterface() || !klass->HasSuperClass()) {
5579 return true;
5580 }
5581 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
5582 if (super_class->IsInitialized()) {
5583 return true;
5584 }
5585 return can_init_parents && CanWeInitializeClass(super_class, can_init_statics, can_init_parents);
5586 }
5587
InitializeClass(Thread * self,Handle<mirror::Class> klass,bool can_init_statics,bool can_init_parents)5588 bool ClassLinker::InitializeClass(Thread* self,
5589 Handle<mirror::Class> klass,
5590 bool can_init_statics,
5591 bool can_init_parents) {
5592 // see JLS 3rd edition, 12.4.2 "Detailed Initialization Procedure" for the locking protocol
5593
5594 // Are we already initialized and therefore done?
5595 // Note: we differ from the JLS here as we don't do this under the lock, this is benign as
5596 // an initialized class will never change its state.
5597 if (klass->IsInitialized()) {
5598 return true;
5599 }
5600
5601 // Fast fail if initialization requires a full runtime. Not part of the JLS.
5602 if (!CanWeInitializeClass(klass.Get(), can_init_statics, can_init_parents)) {
5603 return false;
5604 }
5605
5606 self->AllowThreadSuspension();
5607 Runtime* const runtime = Runtime::Current();
5608 const bool stats_enabled = runtime->HasStatsEnabled();
5609 uint64_t t0;
5610 {
5611 ObjectLock<mirror::Class> lock(self, klass);
5612
5613 // Re-check under the lock in case another thread initialized ahead of us.
5614 if (klass->IsInitialized()) {
5615 return true;
5616 }
5617
5618 // Was the class already found to be erroneous? Done under the lock to match the JLS.
5619 if (klass->IsErroneous()) {
5620 ThrowEarlierClassFailure(klass.Get(), true, /* log= */ true);
5621 VlogClassInitializationFailure(klass);
5622 return false;
5623 }
5624
5625 CHECK(klass->IsResolved() && !klass->IsErroneousResolved())
5626 << klass->PrettyClass() << ": state=" << klass->GetStatus();
5627
5628 if (!klass->IsVerified()) {
5629 VerifyClass(self, /*verifier_deps= */ nullptr, klass);
5630 if (!klass->IsVerified()) {
5631 // We failed to verify, expect either the klass to be erroneous or verification failed at
5632 // compile time.
5633 if (klass->IsErroneous()) {
5634 // The class is erroneous. This may be a verifier error, or another thread attempted
5635 // verification and/or initialization and failed. We can distinguish those cases by
5636 // whether an exception is already pending.
5637 if (self->IsExceptionPending()) {
5638 // Check that it's a VerifyError.
5639 DCHECK(IsVerifyError(self->GetException()));
5640 } else {
5641 // Check that another thread attempted initialization.
5642 DCHECK_NE(0, klass->GetClinitThreadId());
5643 DCHECK_NE(self->GetTid(), klass->GetClinitThreadId());
5644 // Need to rethrow the previous failure now.
5645 ThrowEarlierClassFailure(klass.Get(), true);
5646 }
5647 VlogClassInitializationFailure(klass);
5648 } else {
5649 CHECK(Runtime::Current()->IsAotCompiler());
5650 CHECK(klass->ShouldVerifyAtRuntime() || klass->IsVerifiedNeedsAccessChecks());
5651 self->AssertNoPendingException();
5652 self->SetException(Runtime::Current()->GetPreAllocatedNoClassDefFoundError());
5653 }
5654 self->AssertPendingException();
5655 return false;
5656 } else {
5657 self->AssertNoPendingException();
5658 }
5659
5660 // A separate thread could have moved us all the way to initialized. A "simple" example
5661 // involves a subclass of the current class being initialized at the same time (which
5662 // will implicitly initialize the superclass, if scheduled that way). b/28254258
5663 DCHECK(!klass->IsErroneous()) << klass->GetStatus();
5664 if (klass->IsInitialized()) {
5665 return true;
5666 }
5667 }
5668
5669 // If the class is ClassStatus::kInitializing, either this thread is
5670 // initializing higher up the stack or another thread has beat us
5671 // to initializing and we need to wait. Either way, this
5672 // invocation of InitializeClass will not be responsible for
5673 // running <clinit> and will return.
5674 if (klass->GetStatus() == ClassStatus::kInitializing) {
5675 // Could have got an exception during verification.
5676 if (self->IsExceptionPending()) {
5677 VlogClassInitializationFailure(klass);
5678 return false;
5679 }
5680 // We caught somebody else in the act; was it us?
5681 if (klass->GetClinitThreadId() == self->GetTid()) {
5682 // Yes. That's fine. Return so we can continue initializing.
5683 return true;
5684 }
5685 // No. That's fine. Wait for another thread to finish initializing.
5686 return WaitForInitializeClass(klass, self, lock);
5687 }
5688
5689 // Try to get the oat class's status for this class if the oat file is present. The compiler
5690 // tries to validate superclass descriptors, and writes the result into the oat file.
5691 // Runtime correctness is guaranteed by classpath checks done on loading. If the classpath
5692 // is different at runtime than it was at compile time, the oat file is rejected. So if the
5693 // oat file is present, the classpaths must match, and the runtime time check can be skipped.
5694 bool has_oat_class = false;
5695 const OatFile::OatClass oat_class = (runtime->IsStarted() && !runtime->IsAotCompiler())
5696 ? OatFile::FindOatClass(klass->GetDexFile(), klass->GetDexClassDefIndex(), &has_oat_class)
5697 : OatFile::OatClass::Invalid();
5698 if (oat_class.GetStatus() < ClassStatus::kSuperclassValidated &&
5699 !ValidateSuperClassDescriptors(klass)) {
5700 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
5701 return false;
5702 }
5703 self->AllowThreadSuspension();
5704
5705 CHECK_EQ(klass->GetStatus(), ClassStatus::kVerified) << klass->PrettyClass()
5706 << " self.tid=" << self->GetTid() << " clinit.tid=" << klass->GetClinitThreadId();
5707
5708 // From here out other threads may observe that we're initializing and so changes of state
5709 // require the a notification.
5710 klass->SetClinitThreadId(self->GetTid());
5711 mirror::Class::SetStatus(klass, ClassStatus::kInitializing, self);
5712
5713 t0 = stats_enabled ? NanoTime() : 0u;
5714 }
5715
5716 uint64_t t_sub = 0;
5717
5718 // Initialize super classes, must be done while initializing for the JLS.
5719 if (!klass->IsInterface() && klass->HasSuperClass()) {
5720 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
5721 if (!super_class->IsInitialized()) {
5722 CHECK(!super_class->IsInterface());
5723 CHECK(can_init_parents);
5724 StackHandleScope<1> hs(self);
5725 Handle<mirror::Class> handle_scope_super(hs.NewHandle(super_class));
5726 uint64_t super_t0 = stats_enabled ? NanoTime() : 0u;
5727 bool super_initialized = InitializeClass(self, handle_scope_super, can_init_statics, true);
5728 uint64_t super_t1 = stats_enabled ? NanoTime() : 0u;
5729 if (!super_initialized) {
5730 // The super class was verified ahead of entering initializing, we should only be here if
5731 // the super class became erroneous due to initialization.
5732 // For the case of aot compiler, the super class might also be initializing but we don't
5733 // want to process circular dependencies in pre-compile.
5734 CHECK(self->IsExceptionPending())
5735 << "Super class initialization failed for "
5736 << handle_scope_super->PrettyDescriptor()
5737 << " that has unexpected status " << handle_scope_super->GetStatus()
5738 << "\nPending exception:\n"
5739 << (self->GetException() != nullptr ? self->GetException()->Dump() : "");
5740 ObjectLock<mirror::Class> lock(self, klass);
5741 // Initialization failed because the super-class is erroneous.
5742 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
5743 return false;
5744 }
5745 t_sub = super_t1 - super_t0;
5746 }
5747 }
5748
5749 if (!klass->IsInterface()) {
5750 // Initialize interfaces with default methods for the JLS.
5751 size_t num_direct_interfaces = klass->NumDirectInterfaces();
5752 // Only setup the (expensive) handle scope if we actually need to.
5753 if (UNLIKELY(num_direct_interfaces > 0)) {
5754 StackHandleScope<1> hs_iface(self);
5755 MutableHandle<mirror::Class> handle_scope_iface(hs_iface.NewHandle<mirror::Class>(nullptr));
5756 for (size_t i = 0; i < num_direct_interfaces; i++) {
5757 handle_scope_iface.Assign(klass->GetDirectInterface(i));
5758 CHECK(handle_scope_iface != nullptr) << klass->PrettyDescriptor() << " iface #" << i;
5759 CHECK(handle_scope_iface->IsInterface());
5760 if (handle_scope_iface->HasBeenRecursivelyInitialized()) {
5761 // We have already done this for this interface. Skip it.
5762 continue;
5763 }
5764 // We cannot just call initialize class directly because we need to ensure that ALL
5765 // interfaces with default methods are initialized. Non-default interface initialization
5766 // will not affect other non-default super-interfaces.
5767 // This is not very precise, misses all walking.
5768 uint64_t inf_t0 = stats_enabled ? NanoTime() : 0u;
5769 bool iface_initialized = InitializeDefaultInterfaceRecursive(self,
5770 handle_scope_iface,
5771 can_init_statics,
5772 can_init_parents);
5773 uint64_t inf_t1 = stats_enabled ? NanoTime() : 0u;
5774 if (!iface_initialized) {
5775 ObjectLock<mirror::Class> lock(self, klass);
5776 // Initialization failed because one of our interfaces with default methods is erroneous.
5777 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
5778 return false;
5779 }
5780 t_sub += inf_t1 - inf_t0;
5781 }
5782 }
5783 }
5784
5785 const size_t num_static_fields = klass->NumStaticFields();
5786 if (num_static_fields > 0) {
5787 const dex::ClassDef* dex_class_def = klass->GetClassDef();
5788 CHECK(dex_class_def != nullptr);
5789 StackHandleScope<3> hs(self);
5790 Handle<mirror::ClassLoader> class_loader(hs.NewHandle(klass->GetClassLoader()));
5791 Handle<mirror::DexCache> dex_cache(hs.NewHandle(klass->GetDexCache()));
5792
5793 // Eagerly fill in static fields so that the we don't have to do as many expensive
5794 // Class::FindStaticField in ResolveField.
5795 for (size_t i = 0; i < num_static_fields; ++i) {
5796 ArtField* field = klass->GetStaticField(i);
5797 const uint32_t field_idx = field->GetDexFieldIndex();
5798 ArtField* resolved_field = dex_cache->GetResolvedField(field_idx);
5799 if (resolved_field == nullptr) {
5800 // Populating cache of a dex file which defines `klass` should always be allowed.
5801 DCHECK(!hiddenapi::ShouldDenyAccessToMember(
5802 field,
5803 hiddenapi::AccessContext(class_loader.Get(), dex_cache.Get()),
5804 hiddenapi::AccessMethod::kNone));
5805 dex_cache->SetResolvedField(field_idx, field);
5806 } else {
5807 DCHECK_EQ(field, resolved_field);
5808 }
5809 }
5810
5811 annotations::RuntimeEncodedStaticFieldValueIterator value_it(dex_cache,
5812 class_loader,
5813 this,
5814 *dex_class_def);
5815 const DexFile& dex_file = *dex_cache->GetDexFile();
5816
5817 if (value_it.HasNext()) {
5818 ClassAccessor accessor(dex_file, *dex_class_def);
5819 CHECK(can_init_statics);
5820 for (const ClassAccessor::Field& field : accessor.GetStaticFields()) {
5821 if (!value_it.HasNext()) {
5822 break;
5823 }
5824 ArtField* art_field = ResolveField(field.GetIndex(),
5825 dex_cache,
5826 class_loader,
5827 /* is_static= */ true);
5828 if (Runtime::Current()->IsActiveTransaction()) {
5829 value_it.ReadValueToField<true>(art_field);
5830 } else {
5831 value_it.ReadValueToField<false>(art_field);
5832 }
5833 if (self->IsExceptionPending()) {
5834 break;
5835 }
5836 value_it.Next();
5837 }
5838 DCHECK(self->IsExceptionPending() || !value_it.HasNext());
5839 }
5840 }
5841
5842
5843 if (!self->IsExceptionPending()) {
5844 ArtMethod* clinit = klass->FindClassInitializer(image_pointer_size_);
5845 if (clinit != nullptr) {
5846 CHECK(can_init_statics);
5847 JValue result;
5848 clinit->Invoke(self, nullptr, 0, &result, "V");
5849 }
5850 }
5851 self->AllowThreadSuspension();
5852 uint64_t t1 = stats_enabled ? NanoTime() : 0u;
5853
5854 VisiblyInitializedCallback* callback = nullptr;
5855 bool success = true;
5856 {
5857 ObjectLock<mirror::Class> lock(self, klass);
5858
5859 if (self->IsExceptionPending()) {
5860 WrapExceptionInInitializer(klass);
5861 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
5862 success = false;
5863 } else if (Runtime::Current()->IsActiveTransaction() && IsTransactionAborted()) {
5864 // The exception thrown when the transaction aborted has been caught and cleared
5865 // so we need to throw it again now.
5866 VLOG(compiler) << "Return from class initializer of "
5867 << mirror::Class::PrettyDescriptor(klass.Get())
5868 << " without exception while transaction was aborted: re-throw it now.";
5869 ThrowTransactionAbortError(self);
5870 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
5871 success = false;
5872 } else {
5873 if (stats_enabled) {
5874 RuntimeStats* global_stats = runtime->GetStats();
5875 RuntimeStats* thread_stats = self->GetStats();
5876 ++global_stats->class_init_count;
5877 ++thread_stats->class_init_count;
5878 global_stats->class_init_time_ns += (t1 - t0 - t_sub);
5879 thread_stats->class_init_time_ns += (t1 - t0 - t_sub);
5880 }
5881 // Set the class as initialized except if failed to initialize static fields.
5882 callback = MarkClassInitialized(self, klass);
5883 if (VLOG_IS_ON(class_linker)) {
5884 std::string temp;
5885 LOG(INFO) << "Initialized class " << klass->GetDescriptor(&temp) << " from " <<
5886 klass->GetLocation();
5887 }
5888 }
5889 }
5890 if (callback != nullptr) {
5891 callback->MakeVisible(self);
5892 }
5893 return success;
5894 }
5895
5896 // We recursively run down the tree of interfaces. We need to do this in the order they are declared
5897 // and perform the initialization only on those interfaces that contain default methods.
InitializeDefaultInterfaceRecursive(Thread * self,Handle<mirror::Class> iface,bool can_init_statics,bool can_init_parents)5898 bool ClassLinker::InitializeDefaultInterfaceRecursive(Thread* self,
5899 Handle<mirror::Class> iface,
5900 bool can_init_statics,
5901 bool can_init_parents) {
5902 CHECK(iface->IsInterface());
5903 size_t num_direct_ifaces = iface->NumDirectInterfaces();
5904 // Only create the (expensive) handle scope if we need it.
5905 if (UNLIKELY(num_direct_ifaces > 0)) {
5906 StackHandleScope<1> hs(self);
5907 MutableHandle<mirror::Class> handle_super_iface(hs.NewHandle<mirror::Class>(nullptr));
5908 // First we initialize all of iface's super-interfaces recursively.
5909 for (size_t i = 0; i < num_direct_ifaces; i++) {
5910 ObjPtr<mirror::Class> super_iface = iface->GetDirectInterface(i);
5911 CHECK(super_iface != nullptr) << iface->PrettyDescriptor() << " iface #" << i;
5912 if (!super_iface->HasBeenRecursivelyInitialized()) {
5913 // Recursive step
5914 handle_super_iface.Assign(super_iface);
5915 if (!InitializeDefaultInterfaceRecursive(self,
5916 handle_super_iface,
5917 can_init_statics,
5918 can_init_parents)) {
5919 return false;
5920 }
5921 }
5922 }
5923 }
5924
5925 bool result = true;
5926 // Then we initialize 'iface' if it has default methods. We do not need to (and in fact must not)
5927 // initialize if we don't have default methods.
5928 if (iface->HasDefaultMethods()) {
5929 result = EnsureInitialized(self, iface, can_init_statics, can_init_parents);
5930 }
5931
5932 // Mark that this interface has undergone recursive default interface initialization so we know we
5933 // can skip it on any later class initializations. We do this even if we are not a default
5934 // interface since we can still avoid the traversal. This is purely a performance optimization.
5935 if (result) {
5936 // TODO This should be done in a better way
5937 // Note: Use a try-lock to avoid blocking when someone else is holding the lock on this
5938 // interface. It is bad (Java) style, but not impossible. Marking the recursive
5939 // initialization is a performance optimization (to avoid another idempotent visit
5940 // for other implementing classes/interfaces), and can be revisited later.
5941 ObjectTryLock<mirror::Class> lock(self, iface);
5942 if (lock.Acquired()) {
5943 iface->SetRecursivelyInitialized();
5944 }
5945 }
5946 return result;
5947 }
5948
WaitForInitializeClass(Handle<mirror::Class> klass,Thread * self,ObjectLock<mirror::Class> & lock)5949 bool ClassLinker::WaitForInitializeClass(Handle<mirror::Class> klass,
5950 Thread* self,
5951 ObjectLock<mirror::Class>& lock)
5952 REQUIRES_SHARED(Locks::mutator_lock_) {
5953 while (true) {
5954 self->AssertNoPendingException();
5955 CHECK(!klass->IsInitialized());
5956 lock.WaitIgnoringInterrupts();
5957
5958 // When we wake up, repeat the test for init-in-progress. If
5959 // there's an exception pending (only possible if
5960 // we were not using WaitIgnoringInterrupts), bail out.
5961 if (self->IsExceptionPending()) {
5962 WrapExceptionInInitializer(klass);
5963 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
5964 return false;
5965 }
5966 // Spurious wakeup? Go back to waiting.
5967 if (klass->GetStatus() == ClassStatus::kInitializing) {
5968 continue;
5969 }
5970 if (klass->GetStatus() == ClassStatus::kVerified &&
5971 Runtime::Current()->IsAotCompiler()) {
5972 // Compile time initialization failed.
5973 return false;
5974 }
5975 if (klass->IsErroneous()) {
5976 // The caller wants an exception, but it was thrown in a
5977 // different thread. Synthesize one here.
5978 ThrowNoClassDefFoundError("<clinit> failed for class %s; see exception in other thread",
5979 klass->PrettyDescriptor().c_str());
5980 VlogClassInitializationFailure(klass);
5981 return false;
5982 }
5983 if (klass->IsInitialized()) {
5984 return true;
5985 }
5986 LOG(FATAL) << "Unexpected class status. " << klass->PrettyClass() << " is "
5987 << klass->GetStatus();
5988 }
5989 UNREACHABLE();
5990 }
5991
ThrowSignatureCheckResolveReturnTypeException(Handle<mirror::Class> klass,Handle<mirror::Class> super_klass,ArtMethod * method,ArtMethod * m)5992 static void ThrowSignatureCheckResolveReturnTypeException(Handle<mirror::Class> klass,
5993 Handle<mirror::Class> super_klass,
5994 ArtMethod* method,
5995 ArtMethod* m)
5996 REQUIRES_SHARED(Locks::mutator_lock_) {
5997 DCHECK(Thread::Current()->IsExceptionPending());
5998 DCHECK(!m->IsProxyMethod());
5999 const DexFile* dex_file = m->GetDexFile();
6000 const dex::MethodId& method_id = dex_file->GetMethodId(m->GetDexMethodIndex());
6001 const dex::ProtoId& proto_id = dex_file->GetMethodPrototype(method_id);
6002 dex::TypeIndex return_type_idx = proto_id.return_type_idx_;
6003 std::string return_type = dex_file->PrettyType(return_type_idx);
6004 std::string class_loader = mirror::Object::PrettyTypeOf(m->GetDeclaringClass()->GetClassLoader());
6005 ThrowWrappedLinkageError(klass.Get(),
6006 "While checking class %s method %s signature against %s %s: "
6007 "Failed to resolve return type %s with %s",
6008 mirror::Class::PrettyDescriptor(klass.Get()).c_str(),
6009 ArtMethod::PrettyMethod(method).c_str(),
6010 super_klass->IsInterface() ? "interface" : "superclass",
6011 mirror::Class::PrettyDescriptor(super_klass.Get()).c_str(),
6012 return_type.c_str(), class_loader.c_str());
6013 }
6014
ThrowSignatureCheckResolveArgException(Handle<mirror::Class> klass,Handle<mirror::Class> super_klass,ArtMethod * method,ArtMethod * m,uint32_t index,dex::TypeIndex arg_type_idx)6015 static void ThrowSignatureCheckResolveArgException(Handle<mirror::Class> klass,
6016 Handle<mirror::Class> super_klass,
6017 ArtMethod* method,
6018 ArtMethod* m,
6019 uint32_t index,
6020 dex::TypeIndex arg_type_idx)
6021 REQUIRES_SHARED(Locks::mutator_lock_) {
6022 DCHECK(Thread::Current()->IsExceptionPending());
6023 DCHECK(!m->IsProxyMethod());
6024 const DexFile* dex_file = m->GetDexFile();
6025 std::string arg_type = dex_file->PrettyType(arg_type_idx);
6026 std::string class_loader = mirror::Object::PrettyTypeOf(m->GetDeclaringClass()->GetClassLoader());
6027 ThrowWrappedLinkageError(klass.Get(),
6028 "While checking class %s method %s signature against %s %s: "
6029 "Failed to resolve arg %u type %s with %s",
6030 mirror::Class::PrettyDescriptor(klass.Get()).c_str(),
6031 ArtMethod::PrettyMethod(method).c_str(),
6032 super_klass->IsInterface() ? "interface" : "superclass",
6033 mirror::Class::PrettyDescriptor(super_klass.Get()).c_str(),
6034 index, arg_type.c_str(), class_loader.c_str());
6035 }
6036
ThrowSignatureMismatch(Handle<mirror::Class> klass,Handle<mirror::Class> super_klass,ArtMethod * method,const std::string & error_msg)6037 static void ThrowSignatureMismatch(Handle<mirror::Class> klass,
6038 Handle<mirror::Class> super_klass,
6039 ArtMethod* method,
6040 const std::string& error_msg)
6041 REQUIRES_SHARED(Locks::mutator_lock_) {
6042 ThrowLinkageError(klass.Get(),
6043 "Class %s method %s resolves differently in %s %s: %s",
6044 mirror::Class::PrettyDescriptor(klass.Get()).c_str(),
6045 ArtMethod::PrettyMethod(method).c_str(),
6046 super_klass->IsInterface() ? "interface" : "superclass",
6047 mirror::Class::PrettyDescriptor(super_klass.Get()).c_str(),
6048 error_msg.c_str());
6049 }
6050
HasSameSignatureWithDifferentClassLoaders(Thread * self,Handle<mirror::Class> klass,Handle<mirror::Class> super_klass,ArtMethod * method1,ArtMethod * method2)6051 static bool HasSameSignatureWithDifferentClassLoaders(Thread* self,
6052 Handle<mirror::Class> klass,
6053 Handle<mirror::Class> super_klass,
6054 ArtMethod* method1,
6055 ArtMethod* method2)
6056 REQUIRES_SHARED(Locks::mutator_lock_) {
6057 {
6058 StackHandleScope<1> hs(self);
6059 Handle<mirror::Class> return_type(hs.NewHandle(method1->ResolveReturnType()));
6060 if (UNLIKELY(return_type == nullptr)) {
6061 ThrowSignatureCheckResolveReturnTypeException(klass, super_klass, method1, method1);
6062 return false;
6063 }
6064 ObjPtr<mirror::Class> other_return_type = method2->ResolveReturnType();
6065 if (UNLIKELY(other_return_type == nullptr)) {
6066 ThrowSignatureCheckResolveReturnTypeException(klass, super_klass, method1, method2);
6067 return false;
6068 }
6069 if (UNLIKELY(other_return_type != return_type.Get())) {
6070 ThrowSignatureMismatch(klass, super_klass, method1,
6071 StringPrintf("Return types mismatch: %s(%p) vs %s(%p)",
6072 return_type->PrettyClassAndClassLoader().c_str(),
6073 return_type.Get(),
6074 other_return_type->PrettyClassAndClassLoader().c_str(),
6075 other_return_type.Ptr()));
6076 return false;
6077 }
6078 }
6079 const dex::TypeList* types1 = method1->GetParameterTypeList();
6080 const dex::TypeList* types2 = method2->GetParameterTypeList();
6081 if (types1 == nullptr) {
6082 if (types2 != nullptr && types2->Size() != 0) {
6083 ThrowSignatureMismatch(klass, super_klass, method1,
6084 StringPrintf("Type list mismatch with %s",
6085 method2->PrettyMethod(true).c_str()));
6086 return false;
6087 }
6088 return true;
6089 } else if (UNLIKELY(types2 == nullptr)) {
6090 if (types1->Size() != 0) {
6091 ThrowSignatureMismatch(klass, super_klass, method1,
6092 StringPrintf("Type list mismatch with %s",
6093 method2->PrettyMethod(true).c_str()));
6094 return false;
6095 }
6096 return true;
6097 }
6098 uint32_t num_types = types1->Size();
6099 if (UNLIKELY(num_types != types2->Size())) {
6100 ThrowSignatureMismatch(klass, super_klass, method1,
6101 StringPrintf("Type list mismatch with %s",
6102 method2->PrettyMethod(true).c_str()));
6103 return false;
6104 }
6105 for (uint32_t i = 0; i < num_types; ++i) {
6106 StackHandleScope<1> hs(self);
6107 dex::TypeIndex param_type_idx = types1->GetTypeItem(i).type_idx_;
6108 Handle<mirror::Class> param_type(hs.NewHandle(
6109 method1->ResolveClassFromTypeIndex(param_type_idx)));
6110 if (UNLIKELY(param_type == nullptr)) {
6111 ThrowSignatureCheckResolveArgException(klass, super_klass, method1,
6112 method1, i, param_type_idx);
6113 return false;
6114 }
6115 dex::TypeIndex other_param_type_idx = types2->GetTypeItem(i).type_idx_;
6116 ObjPtr<mirror::Class> other_param_type =
6117 method2->ResolveClassFromTypeIndex(other_param_type_idx);
6118 if (UNLIKELY(other_param_type == nullptr)) {
6119 ThrowSignatureCheckResolveArgException(klass, super_klass, method1,
6120 method2, i, other_param_type_idx);
6121 return false;
6122 }
6123 if (UNLIKELY(param_type.Get() != other_param_type)) {
6124 ThrowSignatureMismatch(klass, super_klass, method1,
6125 StringPrintf("Parameter %u type mismatch: %s(%p) vs %s(%p)",
6126 i,
6127 param_type->PrettyClassAndClassLoader().c_str(),
6128 param_type.Get(),
6129 other_param_type->PrettyClassAndClassLoader().c_str(),
6130 other_param_type.Ptr()));
6131 return false;
6132 }
6133 }
6134 return true;
6135 }
6136
6137
6138 bool ClassLinker::ValidateSuperClassDescriptors(Handle<mirror::Class> klass) {
6139 if (klass->IsInterface()) {
6140 return true;
6141 }
6142 // Begin with the methods local to the superclass.
6143 Thread* self = Thread::Current();
6144 StackHandleScope<1> hs(self);
6145 MutableHandle<mirror::Class> super_klass(hs.NewHandle<mirror::Class>(nullptr));
6146 if (klass->HasSuperClass() &&
6147 klass->GetClassLoader() != klass->GetSuperClass()->GetClassLoader()) {
6148 super_klass.Assign(klass->GetSuperClass());
6149 for (int i = klass->GetSuperClass()->GetVTableLength() - 1; i >= 0; --i) {
6150 auto* m = klass->GetVTableEntry(i, image_pointer_size_);
6151 auto* super_m = klass->GetSuperClass()->GetVTableEntry(i, image_pointer_size_);
6152 if (m != super_m) {
6153 if (UNLIKELY(!HasSameSignatureWithDifferentClassLoaders(self,
6154 klass,
6155 super_klass,
6156 m,
6157 super_m))) {
6158 self->AssertPendingException();
6159 return false;
6160 }
6161 }
6162 }
6163 }
6164 for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
6165 super_klass.Assign(klass->GetIfTable()->GetInterface(i));
6166 if (klass->GetClassLoader() != super_klass->GetClassLoader()) {
6167 uint32_t num_methods = super_klass->NumVirtualMethods();
6168 for (uint32_t j = 0; j < num_methods; ++j) {
6169 auto* m = klass->GetIfTable()->GetMethodArray(i)->GetElementPtrSize<ArtMethod*>(
6170 j, image_pointer_size_);
6171 auto* super_m = super_klass->GetVirtualMethod(j, image_pointer_size_);
6172 if (m != super_m) {
6173 if (UNLIKELY(!HasSameSignatureWithDifferentClassLoaders(self,
6174 klass,
6175 super_klass,
6176 m,
6177 super_m))) {
6178 self->AssertPendingException();
6179 return false;
6180 }
6181 }
6182 }
6183 }
6184 }
6185 return true;
6186 }
6187
6188 bool ClassLinker::EnsureInitialized(Thread* self,
6189 Handle<mirror::Class> c,
6190 bool can_init_fields,
6191 bool can_init_parents) {
6192 DCHECK(c != nullptr);
6193
6194 if (c->IsInitialized()) {
6195 // If we've seen an initialized but not visibly initialized class
6196 // many times, request visible initialization.
6197 if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
6198 // Thanks to the x86 memory model, classes skip the initialized status.
6199 DCHECK(c->IsVisiblyInitialized());
6200 } else if (UNLIKELY(!c->IsVisiblyInitialized())) {
6201 if (self->IncrementMakeVisiblyInitializedCounter()) {
6202 MakeInitializedClassesVisiblyInitialized(self, /*wait=*/ false);
6203 }
6204 }
6205 return true;
6206 }
6207 // SubtypeCheckInfo::Initialized must happen-before any new-instance for that type.
6208 //
6209 // Ensure the bitstring is initialized before any of the class initialization
6210 // logic occurs. Once a class initializer starts running, objects can
6211 // escape into the heap and use the subtype checking code.
6212 //
6213 // Note: A class whose SubtypeCheckInfo is at least Initialized means it
6214 // can be used as a source for the IsSubClass check, and that all ancestors
6215 // of the class are Assigned (can be used as a target for IsSubClass check)
6216 // or Overflowed (can be used as a source for IsSubClass check).
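// A rough illustrative sketch of the scheme (an assumption based on the comment
// above; the authoritative details live in the SubtypeCheck implementation): each
// Initialized class is assigned a bitstring encoding its path from the root of the
// hierarchy, so a subtype query against an Assigned target reduces to a masked
// compare of the two bitstrings. This is why the encoding must be in place before
// any instance of `c` can escape into the heap.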
6217 if (kBitstringSubtypeCheckEnabled) {
6218 MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
6219 SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(c.Get());
6220 // TODO: Avoid taking subtype_check_lock_ if SubtypeCheck is already initialized.
6221 }
6222 const bool success = InitializeClass(self, c, can_init_fields, can_init_parents);
6223 if (!success) {
6224 if (can_init_fields && can_init_parents) {
6225 CHECK(self->IsExceptionPending()) << c->PrettyClass();
6226 } else {
6227 // There may or may not be an exception pending. If there is, clear it.
6228 // We propagate the exception only if we can initialize fields and parents.
6229 self->ClearException();
6230 }
6231 } else {
6232 self->AssertNoPendingException();
6233 }
6234 return success;
6235 }
6236
6237 void ClassLinker::FixupTemporaryDeclaringClass(ObjPtr<mirror::Class> temp_class,
6238 ObjPtr<mirror::Class> new_class) {
6239 DCHECK_EQ(temp_class->NumInstanceFields(), 0u);
6240 for (ArtField& field : new_class->GetIFields()) {
6241 if (field.GetDeclaringClass() == temp_class) {
6242 field.SetDeclaringClass(new_class);
6243 }
6244 }
6245
6246 DCHECK_EQ(temp_class->NumStaticFields(), 0u);
6247 for (ArtField& field : new_class->GetSFields()) {
6248 if (field.GetDeclaringClass() == temp_class) {
6249 field.SetDeclaringClass(new_class);
6250 }
6251 }
6252
6253 DCHECK_EQ(temp_class->NumDirectMethods(), 0u);
6254 DCHECK_EQ(temp_class->NumVirtualMethods(), 0u);
6255 for (auto& method : new_class->GetMethods(image_pointer_size_)) {
6256 if (method.GetDeclaringClass() == temp_class) {
6257 method.SetDeclaringClass(new_class);
6258 }
6259 }
6260
6261 // Make sure the remembered set and mod-union tables know that we updated some of the native
6262 // roots.
6263 WriteBarrier::ForEveryFieldWrite(new_class);
6264 }
6265
6266 void ClassLinker::RegisterClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
6267 CHECK(class_loader->GetAllocator() == nullptr);
6268 CHECK(class_loader->GetClassTable() == nullptr);
6269 Thread* const self = Thread::Current();
6270 ClassLoaderData data;
6271 data.weak_root = self->GetJniEnv()->GetVm()->AddWeakGlobalRef(self, class_loader);
6272 // Create and set the class table.
6273 data.class_table = new ClassTable;
6274 class_loader->SetClassTable(data.class_table);
6275 // Create and set the linear allocator.
6276 data.allocator = Runtime::Current()->CreateLinearAlloc();
6277 class_loader->SetAllocator(data.allocator);
6278 // Add to the list so that we know to free the data later.
6279 class_loaders_.push_back(data);
6280 }
6281
6282 ClassTable* ClassLinker::InsertClassTableForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
6283 if (class_loader == nullptr) {
6284 return boot_class_table_.get();
6285 }
6286 ClassTable* class_table = class_loader->GetClassTable();
6287 if (class_table == nullptr) {
6288 RegisterClassLoader(class_loader);
6289 class_table = class_loader->GetClassTable();
6290 DCHECK(class_table != nullptr);
6291 }
6292 return class_table;
6293 }
6294
6295 ClassTable* ClassLinker::ClassTableForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
6296 return class_loader == nullptr ? boot_class_table_.get() : class_loader->GetClassTable();
6297 }
6298
6299 bool ClassLinker::LinkClass(Thread* self,
6300 const char* descriptor,
6301 Handle<mirror::Class> klass,
6302 Handle<mirror::ObjectArray<mirror::Class>> interfaces,
6303 MutableHandle<mirror::Class>* h_new_class_out) {
6304 CHECK_EQ(ClassStatus::kLoaded, klass->GetStatus());
6305
6306 if (!LinkSuperClass(klass)) {
6307 return false;
6308 }
6309 ArtMethod* imt_data[ImTable::kSize];
6310 // Whether there are any new conflicts compared to the super class.
6311 bool new_conflict = false;
6312 std::fill_n(imt_data, arraysize(imt_data), Runtime::Current()->GetImtUnimplementedMethod());
6313 if (!LinkMethods(self, klass, interfaces, &new_conflict, imt_data)) {
6314 return false;
6315 }
6316 if (!LinkInstanceFields(self, klass)) {
6317 return false;
6318 }
6319 size_t class_size;
6320 if (!LinkStaticFields(self, klass, &class_size)) {
6321 return false;
6322 }
6323 CreateReferenceInstanceOffsets(klass);
6324 CHECK_EQ(ClassStatus::kLoaded, klass->GetStatus());
6325
6326 ImTable* imt = nullptr;
6327 if (klass->ShouldHaveImt()) {
6328 // If there are any new conflicts compared to the super class, we cannot make a copy. There
6329 // can be cases where both will have a conflict method at the same slot without having the same
6330 // set of conflicts. In this case, we cannot share the IMT since the conflict table slow path
6331 // will possibly create a table that is incorrect for either of the classes.
6332 // An identical IMT with a new conflict does not happen very often.
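// A hypothetical illustration: suppose interface methods I1.f, I2.f and I3.f all map
// to the same IMT slot. The super class may hold a conflict table for {I1.f, I2.f}
// in that slot while this class needs {I1.f, I3.f}; both IMTs would then show a
// runtime conflict method at the same index, but sharing the table would let the
// conflict slow path record resolutions that are wrong for one of the two classes.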
6333 if (!new_conflict) {
6334 ImTable* super_imt = klass->FindSuperImt(image_pointer_size_);
6335 if (super_imt != nullptr) {
6336 bool imt_equals = true;
6337 for (size_t i = 0; i < ImTable::kSize && imt_equals; ++i) {
6338 imt_equals = imt_equals && (super_imt->Get(i, image_pointer_size_) == imt_data[i]);
6339 }
6340 if (imt_equals) {
6341 imt = super_imt;
6342 }
6343 }
6344 }
6345 if (imt == nullptr) {
6346 LinearAlloc* allocator = GetAllocatorForClassLoader(klass->GetClassLoader());
6347 imt = reinterpret_cast<ImTable*>(
6348 allocator->Alloc(self,
6349 ImTable::SizeInBytes(image_pointer_size_),
6350 LinearAllocKind::kNoGCRoots));
6351 if (imt == nullptr) {
6352 return false;
6353 }
6354 imt->Populate(imt_data, image_pointer_size_);
6355 }
6356 }
6357
6358 if (!klass->IsTemp() || (!init_done_ && klass->GetClassSize() == class_size)) {
6359 // We don't need to retire this class, as it either has no embedded tables or it was created
6360 // at the correct size during class linker initialization.
6361 CHECK_EQ(klass->GetClassSize(), class_size) << klass->PrettyDescriptor();
6362
6363 if (klass->ShouldHaveEmbeddedVTable()) {
6364 klass->PopulateEmbeddedVTable(image_pointer_size_);
6365 }
6366 if (klass->ShouldHaveImt()) {
6367 klass->SetImt(imt, image_pointer_size_);
6368 }
6369
6370 // Update CHA info based on whether we override methods.
6371 // Have to do this before setting the class as resolved which allows
6372 // instantiation of klass.
6373 if (LIKELY(descriptor != nullptr) && cha_ != nullptr) {
6374 cha_->UpdateAfterLoadingOf(klass);
6375 }
6376
6377 // This will notify waiters on klass that saw the not yet resolved
6378 // class in the class_table_ during EnsureResolved.
6379 mirror::Class::SetStatus(klass, ClassStatus::kResolved, self);
6380 h_new_class_out->Assign(klass.Get());
6381 } else {
6382 CHECK(!klass->IsResolved());
6383 // Retire the temporary class and create the correctly sized resolved class.
6384 StackHandleScope<1> hs(self);
6385 Handle<mirror::Class> h_new_class =
6386 hs.NewHandle(mirror::Class::CopyOf(klass, self, class_size, imt, image_pointer_size_));
6387 // Set arrays to null since we don't want to have multiple classes with the same ArtField or
6388 // ArtMethod array pointers. If this occurs, it causes bugs in remembered sets since the GC
6389 // may not see any references to the target space and clean the card for a class if another
6390 // class had the same array pointer.
6391 klass->SetMethodsPtrUnchecked(nullptr, 0, 0);
6392 klass->SetSFieldsPtrUnchecked(nullptr);
6393 klass->SetIFieldsPtrUnchecked(nullptr);
6394 if (UNLIKELY(h_new_class == nullptr)) {
6395 self->AssertPendingOOMException();
6396 mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
6397 return false;
6398 }
6399
6400 CHECK_EQ(h_new_class->GetClassSize(), class_size);
6401 ObjectLock<mirror::Class> lock(self, h_new_class);
6402 FixupTemporaryDeclaringClass(klass.Get(), h_new_class.Get());
6403
6404 if (LIKELY(descriptor != nullptr)) {
6405 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
6406 const ObjPtr<mirror::ClassLoader> class_loader = h_new_class.Get()->GetClassLoader();
6407 ClassTable* const table = InsertClassTableForClassLoader(class_loader);
6408 const ObjPtr<mirror::Class> existing =
6409 table->UpdateClass(h_new_class.Get(), ComputeModifiedUtf8Hash(descriptor));
6410 CHECK_EQ(existing, klass.Get());
6411 WriteBarrierOnClassLoaderLocked(class_loader, h_new_class.Get());
6412 }
6413
6414 // Update CHA info based on whether we override methods.
6415 // Have to do this before setting the class as resolved which allows
6416 // instantiation of klass.
6417 if (LIKELY(descriptor != nullptr) && cha_ != nullptr) {
6418 cha_->UpdateAfterLoadingOf(h_new_class);
6419 }
6420
6421 // This will notify waiters on temp class that saw the not yet resolved class in the
6422 // class_table_ during EnsureResolved.
6423 mirror::Class::SetStatus(klass, ClassStatus::kRetired, self);
6424
6425 CHECK_EQ(h_new_class->GetStatus(), ClassStatus::kResolving);
6426 // This will notify waiters on new_class that saw the not yet resolved
6427 // class in the class_table_ during EnsureResolved.
6428 mirror::Class::SetStatus(h_new_class, ClassStatus::kResolved, self);
6429 // Return the new class.
6430 h_new_class_out->Assign(h_new_class.Get());
6431 }
6432 return true;
6433 }
6434
6435 bool ClassLinker::LoadSuperAndInterfaces(Handle<mirror::Class> klass, const DexFile& dex_file) {
6436 CHECK_EQ(ClassStatus::kIdx, klass->GetStatus());
6437 const dex::ClassDef& class_def = dex_file.GetClassDef(klass->GetDexClassDefIndex());
6438 dex::TypeIndex super_class_idx = class_def.superclass_idx_;
6439 if (super_class_idx.IsValid()) {
6440 // Check that a class does not inherit from itself directly.
6441 //
6442 // TODO: This is a cheap check to detect the straightforward case
6443 // of a class extending itself (b/28685551), but we should do a
6444 // proper cycle detection on loaded classes, to detect all cases
6445 // of class circularity errors (b/28830038).
6446 if (super_class_idx == class_def.class_idx_) {
6447 ThrowClassCircularityError(klass.Get(),
6448 "Class %s extends itself",
6449 klass->PrettyDescriptor().c_str());
6450 return false;
6451 }
6452
6453 ObjPtr<mirror::Class> super_class = ResolveType(super_class_idx, klass.Get());
6454 if (super_class == nullptr) {
6455 DCHECK(Thread::Current()->IsExceptionPending());
6456 return false;
6457 }
6458 // Verify
6459 if (!klass->CanAccess(super_class)) {
6460 ThrowIllegalAccessError(klass.Get(), "Class %s extended by class %s is inaccessible",
6461 super_class->PrettyDescriptor().c_str(),
6462 klass->PrettyDescriptor().c_str());
6463 return false;
6464 }
6465 CHECK(super_class->IsResolved());
6466 klass->SetSuperClass(super_class);
6467 }
6468 const dex::TypeList* interfaces = dex_file.GetInterfacesList(class_def);
6469 if (interfaces != nullptr) {
6470 for (size_t i = 0; i < interfaces->Size(); i++) {
6471 dex::TypeIndex idx = interfaces->GetTypeItem(i).type_idx_;
6472 if (idx.IsValid()) {
6473 // Check that a class does not implement itself directly.
6474 //
6475 // TODO: This is a cheap check to detect the straightforward case of a class implementing
6476 // itself, but we should do a proper cycle detection on loaded classes, to detect all cases
6477 // of class circularity errors. See b/28685551, b/28830038, and b/301108855
6478 if (idx == class_def.class_idx_) {
6479 ThrowClassCircularityError(
6480 klass.Get(), "Class %s implements itself", klass->PrettyDescriptor().c_str());
6481 return false;
6482 }
6483 }
6484
6485 ObjPtr<mirror::Class> interface = ResolveType(idx, klass.Get());
6486 if (interface == nullptr) {
6487 DCHECK(Thread::Current()->IsExceptionPending());
6488 return false;
6489 }
6490 // Verify
6491 if (!klass->CanAccess(interface)) {
6492 // TODO: the RI seemed to ignore this in my testing.
6493 ThrowIllegalAccessError(klass.Get(),
6494 "Interface %s implemented by class %s is inaccessible",
6495 interface->PrettyDescriptor().c_str(),
6496 klass->PrettyDescriptor().c_str());
6497 return false;
6498 }
6499 }
6500 }
6501 // Mark the class as loaded.
6502 mirror::Class::SetStatus(klass, ClassStatus::kLoaded, nullptr);
6503 return true;
6504 }
6505
6506 bool ClassLinker::LinkSuperClass(Handle<mirror::Class> klass) {
6507 CHECK(!klass->IsPrimitive());
6508 ObjPtr<mirror::Class> super = klass->GetSuperClass();
6509 ObjPtr<mirror::Class> object_class = GetClassRoot<mirror::Object>(this);
6510 if (klass.Get() == object_class) {
6511 if (super != nullptr) {
6512 ThrowClassFormatError(klass.Get(), "java.lang.Object must not have a superclass");
6513 return false;
6514 }
6515 return true;
6516 }
6517 if (super == nullptr) {
6518 ThrowLinkageError(klass.Get(), "No superclass defined for class %s",
6519 klass->PrettyDescriptor().c_str());
6520 return false;
6521 }
6522 // Verify
6523 if (klass->IsInterface() && super != object_class) {
6524 ThrowClassFormatError(klass.Get(), "Interfaces must have java.lang.Object as superclass");
6525 return false;
6526 }
6527 if (super->IsFinal()) {
6528 ThrowVerifyError(klass.Get(),
6529 "Superclass %s of %s is declared final",
6530 super->PrettyDescriptor().c_str(),
6531 klass->PrettyDescriptor().c_str());
6532 return false;
6533 }
6534 if (super->IsInterface()) {
6535 ThrowIncompatibleClassChangeError(klass.Get(),
6536 "Superclass %s of %s is an interface",
6537 super->PrettyDescriptor().c_str(),
6538 klass->PrettyDescriptor().c_str());
6539 return false;
6540 }
6541 if (!klass->CanAccess(super)) {
6542 ThrowIllegalAccessError(klass.Get(), "Superclass %s is inaccessible to class %s",
6543 super->PrettyDescriptor().c_str(),
6544 klass->PrettyDescriptor().c_str());
6545 return false;
6546 }
6547 if (!VerifyRecordClass(klass, super)) {
6548 DCHECK(Thread::Current()->IsExceptionPending());
6549 return false;
6550 }
6551
6552 // Inherit kAccClassIsFinalizable from the superclass in case this
6553 // class doesn't override finalize.
6554 if (super->IsFinalizable()) {
6555 klass->SetFinalizable();
6556 }
6557
6558 // Inherit class loader flag from the super class.
6559 if (super->IsClassLoaderClass()) {
6560 klass->SetClassLoaderClass();
6561 }
6562
6563 // Inherit reference flags (if any) from the superclass.
6564 uint32_t reference_flags = (super->GetClassFlags() & mirror::kClassFlagReference);
6565 if (reference_flags != 0) {
6566 CHECK_EQ(klass->GetClassFlags(), 0u);
6567 klass->SetClassFlags(klass->GetClassFlags() | reference_flags);
6568 }
6569 // Disallow custom direct subclasses of java.lang.ref.Reference.
6570 if (init_done_ && super == GetClassRoot<mirror::Reference>(this)) {
6571 ThrowLinkageError(klass.Get(),
6572 "Class %s attempts to subclass java.lang.ref.Reference, which is not allowed",
6573 klass->PrettyDescriptor().c_str());
6574 return false;
6575 }
6576
6577 if (kIsDebugBuild) {
6578 // Ensure super classes are fully resolved prior to resolving fields.
6579 while (super != nullptr) {
6580 CHECK(super->IsResolved());
6581 super = super->GetSuperClass();
6582 }
6583 }
6584 return true;
6585 }
6586
6587 // Comparator for the name and signature of a method, used in finding overriding methods. The
6588 // implementation avoids the use of handles; if it did not, then rather than comparing dex files
6589 // we could compare dex caches in the implementation below.
6590 class MethodNameAndSignatureComparator final : public ValueObject {
6591 public:
6592 explicit MethodNameAndSignatureComparator(ArtMethod* method)
6593 REQUIRES_SHARED(Locks::mutator_lock_) :
6594 dex_file_(method->GetDexFile()), mid_(&dex_file_->GetMethodId(method->GetDexMethodIndex())),
6595 name_view_() {
6596 DCHECK(!method->IsProxyMethod()) << method->PrettyMethod();
6597 }
6598
6599 ALWAYS_INLINE std::string_view GetNameView() {
6600 if (name_view_.empty()) {
6601 name_view_ = dex_file_->GetStringView(mid_->name_idx_);
6602 }
6603 return name_view_;
6604 }
6605
6606 bool HasSameNameAndSignature(ArtMethod* other)
6607 REQUIRES_SHARED(Locks::mutator_lock_) {
6608 DCHECK(!other->IsProxyMethod()) << other->PrettyMethod();
6609 const DexFile* other_dex_file = other->GetDexFile();
6610 const dex::MethodId& other_mid = other_dex_file->GetMethodId(other->GetDexMethodIndex());
6611 if (dex_file_ == other_dex_file) {
6612 return mid_->name_idx_ == other_mid.name_idx_ && mid_->proto_idx_ == other_mid.proto_idx_;
6613 }
6614 return GetNameView() == other_dex_file->GetStringView(other_mid.name_idx_) &&
6615 dex_file_->GetMethodSignature(*mid_) == other_dex_file->GetMethodSignature(other_mid);
6616 }
6617
6618 private:
6619 // Dex file for the method to compare against.
6620 const DexFile* const dex_file_;
6621 // MethodId for the method to compare against.
6622 const dex::MethodId* const mid_;
6623 // Lazily computed name from the dex file's strings.
6624 std::string_view name_view_;
6625 };
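// A minimal usage sketch (with hypothetical variable names): build the comparator
// once for a method, then query it against candidates; the name is decoded lazily
// and raw dex indices are compared when both methods share a dex file.
//
//   MethodNameAndSignatureComparator cmp(super_method);
//   if (cmp.HasSameNameAndSignature(sub_method)) {
//     // Same name and signature: `sub_method` overrides `super_method`.
//   }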
6626
6627 static ObjPtr<mirror::Class> GetImtOwner(ObjPtr<mirror::Class> klass)
6628 REQUIRES_SHARED(Locks::mutator_lock_) {
6629 ImTable* imt = klass->GetImt(kRuntimePointerSize);
6630 DCHECK(imt != nullptr);
6631 while (klass->HasSuperClass()) {
6632 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
6633 // Abstract classes cannot have IMTs, so we skip them.
6634 while (super_class->IsAbstract()) {
6635 DCHECK(super_class->HasSuperClass());
6636 super_class = super_class->GetSuperClass();
6637 }
6638 DCHECK(super_class->ShouldHaveImt());
6639 if (imt != super_class->GetImt(kRuntimePointerSize)) {
6640 // IMT not shared with the super class, return the current class.
6641 DCHECK_EQ(klass->GetImt(kRuntimePointerSize), imt) << klass->PrettyClass();
6642 return klass;
6643 }
6644 klass = super_class;
6645 }
6646 return nullptr;
6647 }
6648
6649 ArtMethod* ClassLinker::AddMethodToConflictTable(ObjPtr<mirror::Class> klass,
6650 ArtMethod* conflict_method,
6651 ArtMethod* interface_method,
6652 ArtMethod* method) {
6653 ImtConflictTable* current_table = conflict_method->GetImtConflictTable(kRuntimePointerSize);
6654 Runtime* const runtime = Runtime::Current();
6655
6656 // The IMT may be shared with a super class, in which case we need to use that
6657 // super class's `LinearAlloc`. The conflict itself should be limited to
6658 // methods at or higher up the chain of the IMT owner; otherwise the class
6659 // linker would have created a different IMT.
6660 ObjPtr<mirror::Class> imt_owner = GetImtOwner(klass);
6661 DCHECK(imt_owner != nullptr);
6662
6663 LinearAlloc* linear_alloc = GetAllocatorForClassLoader(imt_owner->GetClassLoader());
6664 // If the imt owner is in an image, the imt is also there and not in the
6665 // linear alloc.
6666 DCHECK_IMPLIES(runtime->GetHeap()->FindSpaceFromObject(imt_owner, /*fail_ok=*/true) == nullptr,
6667 linear_alloc->Contains(klass->GetImt(kRuntimePointerSize)));
6668
6669 // Create a new entry if the existing one is the shared conflict method.
6670 ArtMethod* new_conflict_method = (conflict_method == runtime->GetImtConflictMethod())
6671 ? runtime->CreateImtConflictMethod(linear_alloc)
6672 : conflict_method;
6673
6674 // Allocate a new table. Note that we will leak this table at the next conflict,
6675 // but that's a tradeoff compared to making the table fixed size.
6676 void* data = linear_alloc->Alloc(
6677 Thread::Current(),
6678 ImtConflictTable::ComputeSizeWithOneMoreEntry(current_table, image_pointer_size_),
6679 LinearAllocKind::kNoGCRoots);
6680 if (data == nullptr) {
6681 LOG(ERROR) << "Failed to allocate conflict table";
6682 return conflict_method;
6683 }
6684 ImtConflictTable* new_table = new (data) ImtConflictTable(current_table,
6685 interface_method,
6686 method,
6687 image_pointer_size_);
6688
6689 // Do a fence to ensure threads see the data in the table before it is assigned
6690 // to the conflict method.
6691 // Note that there is a race in the presence of multiple threads and we may leak
6692 // memory from the LinearAlloc, but that's a tradeoff compared to using
6693 // atomic operations.
6694 std::atomic_thread_fence(std::memory_order_release);
6695 new_conflict_method->SetImtConflictTable(new_table, image_pointer_size_);
6696 return new_conflict_method;
6697 }
6698
6699 void ClassLinker::SetIMTRef(ArtMethod* unimplemented_method,
6700 ArtMethod* imt_conflict_method,
6701 ArtMethod* current_method,
6702 /*out*/bool* new_conflict,
6703 /*out*/ArtMethod** imt_ref) {
6704 // Place method in imt if entry is empty, place conflict otherwise.
6705 if (*imt_ref == unimplemented_method) {
6706 *imt_ref = current_method;
6707 } else if (!(*imt_ref)->IsRuntimeMethod()) {
6708 // If we are not a conflict and we have the same signature and name as the imt
6709 // entry, it must be that we overwrote a superclass vtable entry.
6710 // Note that we have checked IsRuntimeMethod, as there may be multiple different
6711 // conflict methods.
6712 MethodNameAndSignatureComparator imt_comparator(
6713 (*imt_ref)->GetInterfaceMethodIfProxy(image_pointer_size_));
6714 if (imt_comparator.HasSameNameAndSignature(
6715 current_method->GetInterfaceMethodIfProxy(image_pointer_size_))) {
6716 *imt_ref = current_method;
6717 } else {
6718 *imt_ref = imt_conflict_method;
6719 *new_conflict = true;
6720 }
6721 } else {
6722 // Place the default conflict method. Note that there may be an existing conflict
6723 // method in the IMT, but it could be one tailored to the super class, with a
6724 // specific ImtConflictTable.
6725 *imt_ref = imt_conflict_method;
6726 *new_conflict = true;
6727 }
6728 }
6729
6730 void ClassLinker::FillIMTAndConflictTables(ObjPtr<mirror::Class> klass) {
6731 DCHECK(klass->ShouldHaveImt()) << klass->PrettyClass();
6732 DCHECK(!klass->IsTemp()) << klass->PrettyClass();
6733 ArtMethod* imt_data[ImTable::kSize];
6734 Runtime* const runtime = Runtime::Current();
6735 ArtMethod* const unimplemented_method = runtime->GetImtUnimplementedMethod();
6736 ArtMethod* const conflict_method = runtime->GetImtConflictMethod();
6737 std::fill_n(imt_data, arraysize(imt_data), unimplemented_method);
6738 if (klass->GetIfTable() != nullptr) {
6739 bool new_conflict = false;
6740 FillIMTFromIfTable(klass->GetIfTable(),
6741 unimplemented_method,
6742 conflict_method,
6743 klass,
6744 /*create_conflict_tables=*/true,
6745 /*ignore_copied_methods=*/false,
6746 &new_conflict,
6747 &imt_data[0]);
6748 }
6749 // Compare the IMT with the super class including the conflict methods. If they are equivalent,
6750 // we can just use the same pointer.
6751 ImTable* imt = nullptr;
6752 ImTable* super_imt = klass->FindSuperImt(image_pointer_size_);
6753 if (super_imt != nullptr) {
6754 bool same = true;
6755 for (size_t i = 0; same && i < ImTable::kSize; ++i) {
6756 ArtMethod* method = imt_data[i];
6757 ArtMethod* super_method = super_imt->Get(i, image_pointer_size_);
6758 if (method != super_method) {
6759 bool is_conflict_table = method->IsRuntimeMethod() &&
6760 method != unimplemented_method &&
6761 method != conflict_method;
6762 // Verify conflict contents.
6763 bool super_conflict_table = super_method->IsRuntimeMethod() &&
6764 super_method != unimplemented_method &&
6765 super_method != conflict_method;
6766 if (!is_conflict_table || !super_conflict_table) {
6767 same = false;
6768 } else {
6769 ImtConflictTable* table1 = method->GetImtConflictTable(image_pointer_size_);
6770 ImtConflictTable* table2 = super_method->GetImtConflictTable(image_pointer_size_);
6771 same = same && table1->Equals(table2, image_pointer_size_);
6772 }
6773 }
6774 }
6775 if (same) {
6776 imt = super_imt;
6777 }
6778 }
6779 if (imt == nullptr) {
6780 imt = klass->GetImt(image_pointer_size_);
6781 DCHECK(imt != nullptr);
6782 DCHECK_NE(imt, super_imt);
6783 imt->Populate(imt_data, image_pointer_size_);
6784 } else {
6785 klass->SetImt(imt, image_pointer_size_);
6786 }
6787 }
6788
6789 ImtConflictTable* ClassLinker::CreateImtConflictTable(size_t count,
6790 LinearAlloc* linear_alloc,
6791 PointerSize image_pointer_size) {
6792 void* data = linear_alloc->Alloc(Thread::Current(),
6793 ImtConflictTable::ComputeSize(count, image_pointer_size),
6794 LinearAllocKind::kNoGCRoots);
6795 return (data != nullptr) ? new (data) ImtConflictTable(count, image_pointer_size) : nullptr;
6796 }
6797
6798 ImtConflictTable* ClassLinker::CreateImtConflictTable(size_t count, LinearAlloc* linear_alloc) {
6799 return CreateImtConflictTable(count, linear_alloc, image_pointer_size_);
6800 }
6801
6802 void ClassLinker::FillIMTFromIfTable(ObjPtr<mirror::IfTable> if_table,
6803 ArtMethod* unimplemented_method,
6804 ArtMethod* imt_conflict_method,
6805 ObjPtr<mirror::Class> klass,
6806 bool create_conflict_tables,
6807 bool ignore_copied_methods,
6808 /*out*/bool* new_conflict,
6809 /*out*/ArtMethod** imt) {
6810 uint32_t conflict_counts[ImTable::kSize] = {};
6811 for (size_t i = 0, length = if_table->Count(); i < length; ++i) {
6812 ObjPtr<mirror::Class> interface = if_table->GetInterface(i);
6813 const size_t num_virtuals = interface->NumVirtualMethods();
6814 const size_t method_array_count = if_table->GetMethodArrayCount(i);
6815 // There can be more virtual methods than if table methods if there are default methods.
6816 DCHECK_GE(num_virtuals, method_array_count);
6817 if (kIsDebugBuild) {
6818 if (klass->IsInterface()) {
6819 DCHECK_EQ(method_array_count, 0u);
6820 } else {
6821 DCHECK_EQ(interface->NumDeclaredVirtualMethods(), method_array_count);
6822 }
6823 }
6824 if (method_array_count == 0) {
6825 continue;
6826 }
6827 ObjPtr<mirror::PointerArray> method_array = if_table->GetMethodArray(i);
6828 for (size_t j = 0; j < method_array_count; ++j) {
6829 ArtMethod* implementation_method =
6830 method_array->GetElementPtrSize<ArtMethod*>(j, image_pointer_size_);
6831 if (ignore_copied_methods && implementation_method->IsCopied()) {
6832 continue;
6833 }
6834 DCHECK(implementation_method != nullptr);
6835 // Miranda methods cannot be used to implement an interface method, but they are safe to put
6836 // in the IMT since their entrypoint is the interface trampoline. If we put any copied methods
6837 // or interface methods in the IMT here they will not create extra conflicts since we compare
6838 // names and signatures in SetIMTRef.
6839 ArtMethod* interface_method = interface->GetVirtualMethod(j, image_pointer_size_);
6840 const uint32_t imt_index = interface_method->GetImtIndex();
6841
6842 // There is a conflict only if the interface methods mapping to an IMT slot do not all
6843 // have the same implementation method. Keep track of this to avoid creating a conflict
6844 // table when it is not needed.
6845
6846 // Conflict table size for each IMT slot.
6847 ++conflict_counts[imt_index];
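// Note (an observation, not from the original comments): this counts every
// interface method mapping to the slot, so it is an upper bound on the number of
// entries a conflict table for that slot may need; slots that resolve to a single
// implementation never allocate a table (see the `imt_conflict_method` check below).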
6848
6849 SetIMTRef(unimplemented_method,
6850 imt_conflict_method,
6851 implementation_method,
6852 /*out*/new_conflict,
6853 /*out*/&imt[imt_index]);
6854 }
6855 }
6856
6857 if (create_conflict_tables) {
6858 // Create the conflict tables.
6859 LinearAlloc* linear_alloc = GetAllocatorForClassLoader(klass->GetClassLoader());
6860 for (size_t i = 0; i < ImTable::kSize; ++i) {
6861 size_t conflicts = conflict_counts[i];
6862 if (imt[i] == imt_conflict_method) {
6863 ImtConflictTable* new_table = CreateImtConflictTable(conflicts, linear_alloc);
6864 if (new_table != nullptr) {
6865 ArtMethod* new_conflict_method =
6866 Runtime::Current()->CreateImtConflictMethod(linear_alloc);
6867 new_conflict_method->SetImtConflictTable(new_table, image_pointer_size_);
6868 imt[i] = new_conflict_method;
6869 } else {
6870 LOG(ERROR) << "Failed to allocate conflict table";
6871 imt[i] = imt_conflict_method;
6872 }
6873 } else {
6874 DCHECK_NE(imt[i], imt_conflict_method);
6875 }
6876 }
6877
6878 for (size_t i = 0, length = if_table->Count(); i < length; ++i) {
6879 ObjPtr<mirror::Class> interface = if_table->GetInterface(i);
6880 const size_t method_array_count = if_table->GetMethodArrayCount(i);
6881 // There can be more virtual methods than if table methods if there are default methods.
6882 if (method_array_count == 0) {
6883 continue;
6884 }
6885 ObjPtr<mirror::PointerArray> method_array = if_table->GetMethodArray(i);
6886 for (size_t j = 0; j < method_array_count; ++j) {
6887 ArtMethod* implementation_method =
6888 method_array->GetElementPtrSize<ArtMethod*>(j, image_pointer_size_);
6889 if (ignore_copied_methods && implementation_method->IsCopied()) {
6890 continue;
6891 }
6892 DCHECK(implementation_method != nullptr);
6893 ArtMethod* interface_method = interface->GetVirtualMethod(j, image_pointer_size_);
6894 const uint32_t imt_index = interface_method->GetImtIndex();
6895 if (!imt[imt_index]->IsRuntimeMethod() ||
6896 imt[imt_index] == unimplemented_method ||
6897 imt[imt_index] == imt_conflict_method) {
6898 continue;
6899 }
6900 ImtConflictTable* table = imt[imt_index]->GetImtConflictTable(image_pointer_size_);
6901 const size_t num_entries = table->NumEntries(image_pointer_size_);
6902 table->SetInterfaceMethod(num_entries, image_pointer_size_, interface_method);
6903 table->SetImplementationMethod(num_entries, image_pointer_size_, implementation_method);
6904 }
6905 }
6906 }
6907 }
6908
6909 namespace {
6910
6911 // Simple helper function that checks that no subtypes of 'val' are contained within the 'classes'
6912 // set.
6913 static bool NotSubinterfaceOfAny(
6914 const ScopedArenaHashSet<mirror::Class*>& classes,
6915 ObjPtr<mirror::Class> val)
6916 REQUIRES(Roles::uninterruptible_)
6917 REQUIRES_SHARED(Locks::mutator_lock_) {
6918 DCHECK(val != nullptr);
6919 for (ObjPtr<mirror::Class> c : classes) {
6920 if (val->IsAssignableFrom(c)) {
6921 return false;
6922 }
6923 }
6924 return true;
6925 }
6926
6927 // We record new interfaces by the index of the direct interface and the index in the
6928 // direct interface's `IfTable`, or `dex::kDexNoIndex` if it's the direct interface itself.
6929 struct NewInterfaceReference {
6930 uint32_t direct_interface_index;
6931 uint32_t direct_interface_iftable_index;
6932 };
6933
6934 class ProxyInterfacesAccessor {
6935 public:
6936 explicit ProxyInterfacesAccessor(Handle<mirror::ObjectArray<mirror::Class>> interfaces)
6937 REQUIRES_SHARED(Locks::mutator_lock_)
6938 : interfaces_(interfaces) {}
6939
6940 size_t GetLength() REQUIRES_SHARED(Locks::mutator_lock_) {
6941 return interfaces_->GetLength();
6942 }
6943
6944 ObjPtr<mirror::Class> GetInterface(size_t index) REQUIRES_SHARED(Locks::mutator_lock_) {
6945 DCHECK_LT(index, GetLength());
6946 return interfaces_->GetWithoutChecks(index);
6947 }
6948
6949 private:
6950 Handle<mirror::ObjectArray<mirror::Class>> interfaces_;
6951 };
6952
6953 class NonProxyInterfacesAccessor {
6954 public:
6955 NonProxyInterfacesAccessor(ClassLinker* class_linker, Handle<mirror::Class> klass)
6956 REQUIRES_SHARED(Locks::mutator_lock_)
6957 : interfaces_(klass->GetInterfaceTypeList()),
6958 class_linker_(class_linker),
6959 klass_(klass) {
6960 DCHECK(!klass->IsProxyClass());
6961 }
6962
6963 size_t GetLength() REQUIRES_SHARED(Locks::mutator_lock_) {
6964 return (interfaces_ != nullptr) ? interfaces_->Size() : 0u;
6965 }
6966
6967 ObjPtr<mirror::Class> GetInterface(size_t index) REQUIRES_SHARED(Locks::mutator_lock_) {
6968 DCHECK_LT(index, GetLength());
6969 dex::TypeIndex type_index = interfaces_->GetTypeItem(index).type_idx_;
6970 return class_linker_->LookupResolvedType(type_index, klass_.Get());
6971 }
6972
6973 private:
6974 const dex::TypeList* interfaces_;
6975 ClassLinker* class_linker_;
6976 Handle<mirror::Class> klass_;
6977 };
6978
6979 // Finds new interfaces to add to the interface table in addition to superclass interfaces.
6980 //
6981 // Interfaces in the interface table must satisfy the following constraint:
6982 // all I, J: Interface | I <: J implies J precedes I
6983 // (note A <: B means that A is a subtype of B). We order this backwards so that we do not need
6984 // to reorder superclass interfaces when new interfaces are added in subclass's interface tables.
6985 //
6986 // This function returns a list of references for all interfaces in the transitive
6987 // closure of the direct interfaces that are not in the superclass interfaces.
6988 // The entries in the list are ordered to satisfy the interface table ordering
6989 // constraint and therefore the interface table formed by appending them to the
6990 // superclass interface table shall also satisfy that constraint.
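// A worked example of the constraint (hypothetical interfaces): if A extends B and a
// class implements A, then A <: B requires B to precede A, giving the iftable order
// [B, A]. A subclass that additionally implements an unrelated interface D simply
// appends it, [B, A, D], without reordering the inherited prefix.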
6991 template <typename InterfaceAccessor>
6992 ALWAYS_INLINE
6993 static ArrayRef<const NewInterfaceReference> FindNewIfTableInterfaces(
6994 ObjPtr<mirror::IfTable> super_iftable,
6995 size_t super_ifcount,
6996 ScopedArenaAllocator* allocator,
6997 InterfaceAccessor&& interfaces,
6998 ArrayRef<NewInterfaceReference> initial_storage,
6999 /*out*/ScopedArenaVector<NewInterfaceReference>* supplemental_storage)
7000 REQUIRES_SHARED(Locks::mutator_lock_) {
7001 ScopedAssertNoThreadSuspension nts(__FUNCTION__);
7002
7003 // This is the set of all classes already in the iftable. Used to make checking
7004 // if a class has already been added quicker.
7005 constexpr size_t kBufferSize = 32; // 256 bytes on 64-bit architectures.
7006 mirror::Class* buffer[kBufferSize];
7007 ScopedArenaHashSet<mirror::Class*> classes_in_iftable(buffer, kBufferSize, allocator->Adapter());
7008 // The first super_ifcount elements are from the superclass. We note that they are already added.
7009 for (size_t i = 0; i < super_ifcount; i++) {
7010 ObjPtr<mirror::Class> iface = super_iftable->GetInterface(i);
7011 DCHECK(NotSubinterfaceOfAny(classes_in_iftable, iface)) << "Bad ordering.";
7012 classes_in_iftable.Put(iface.Ptr());
7013 }
7014
7015 ArrayRef<NewInterfaceReference> current_storage = initial_storage;
7016 DCHECK_NE(current_storage.size(), 0u);
7017 size_t num_new_interfaces = 0u;
7018 auto insert_reference = [&](uint32_t direct_interface_index,
7019 uint32_t direct_interface_iface_index) {
7020 if (UNLIKELY(num_new_interfaces == current_storage.size())) {
7021 bool copy = current_storage.data() != supplemental_storage->data();
7022 supplemental_storage->resize(2u * num_new_interfaces);
7023 if (copy) {
7024 std::copy_n(current_storage.data(), num_new_interfaces, supplemental_storage->data());
7025 }
7026 current_storage = ArrayRef<NewInterfaceReference>(*supplemental_storage);
7027 }
7028 current_storage[num_new_interfaces] = {direct_interface_index, direct_interface_iface_index};
7029 ++num_new_interfaces;
7030 };
7031
7032 for (size_t i = 0, num_interfaces = interfaces.GetLength(); i != num_interfaces; ++i) {
7033 ObjPtr<mirror::Class> interface = interfaces.GetInterface(i);
7034
7035 // Let us call the interfaces recorded so far (in classes_in_iftable) the current-iface-list.
7036 // At this point in the loop current-iface-list has the invariant that:
7037 // for every pair of interfaces I,J within it:
7038 // if index_of(I) < index_of(J) then I is not a subtype of J
7039
7040 // If we have already seen this element then all of its super-interfaces must already be in the
7041 // current-iface-list so we can skip adding it.
7042 if (classes_in_iftable.find(interface.Ptr()) == classes_in_iftable.end()) {
7043 // We haven't seen this interface so add all of its super-interfaces onto the
7044 // current-iface-list, skipping those already on it.
7045 int32_t ifcount = interface->GetIfTableCount();
7046 for (int32_t j = 0; j < ifcount; j++) {
7047 ObjPtr<mirror::Class> super_interface = interface->GetIfTable()->GetInterface(j);
7048 if (classes_in_iftable.find(super_interface.Ptr()) == classes_in_iftable.end()) {
7049 DCHECK(NotSubinterfaceOfAny(classes_in_iftable, super_interface)) << "Bad ordering.";
7050 classes_in_iftable.Put(super_interface.Ptr());
7051 insert_reference(i, j);
7052 }
7053 }
7054 // Add this interface reference after all of its super-interfaces.
7055 DCHECK(NotSubinterfaceOfAny(classes_in_iftable, interface)) << "Bad ordering";
7056 classes_in_iftable.Put(interface.Ptr());
7057 insert_reference(i, dex::kDexNoIndex);
7058 } else if (kIsDebugBuild) {
7059 // Check all super-interfaces are already in the list.
7060 int32_t ifcount = interface->GetIfTableCount();
7061 for (int32_t j = 0; j < ifcount; j++) {
7062 ObjPtr<mirror::Class> super_interface = interface->GetIfTable()->GetInterface(j);
7063 DCHECK(classes_in_iftable.find(super_interface.Ptr()) != classes_in_iftable.end())
7064 << "Iftable does not contain " << mirror::Class::PrettyClass(super_interface)
7065 << ", a superinterface of " << interface->PrettyClass();
7066 }
7067 }
7068 }
7069 return ArrayRef<const NewInterfaceReference>(current_storage.data(), num_new_interfaces);
7070 }
7071
7072 template <typename InterfaceAccessor>
7073 static ObjPtr<mirror::IfTable> SetupInterfaceLookupTable(
7074 Thread* self,
7075 Handle<mirror::Class> klass,
7076 ScopedArenaAllocator* allocator,
7077 InterfaceAccessor&& interfaces)
7078 REQUIRES_SHARED(Locks::mutator_lock_) {
7079 DCHECK(klass->HasSuperClass());
7080 ObjPtr<mirror::IfTable> super_iftable = klass->GetSuperClass()->GetIfTable();
7081 DCHECK(super_iftable != nullptr);
7082 const size_t num_interfaces = interfaces.GetLength();
7083
7084 // If there are no new interfaces, return the interface table from superclass.
7085 // If any implementation methods are overridden, we shall copy the table and
7086 // the method arrays that contain any differences (copy-on-write).
7087 if (num_interfaces == 0) {
7088 return super_iftable;
7089 }
7090
7091 // Check that every class being implemented is an interface.
7092 for (size_t i = 0; i != num_interfaces; ++i) {
7093 ObjPtr<mirror::Class> interface = interfaces.GetInterface(i);
7094 DCHECK(interface != nullptr);
7095 if (UNLIKELY(!interface->IsInterface())) {
7096 ThrowIncompatibleClassChangeError(klass.Get(),
7097 "Class %s implements non-interface class %s",
7098 klass->PrettyDescriptor().c_str(),
7099 interface->PrettyDescriptor().c_str());
7100 return nullptr;
7101 }
7102 }
7103
7104 static constexpr size_t kMaxStackReferences = 16;
7105 NewInterfaceReference initial_storage[kMaxStackReferences];
7106 ScopedArenaVector<NewInterfaceReference> supplemental_storage(allocator->Adapter());
7107 const size_t super_ifcount = super_iftable->Count();
7108 ArrayRef<const NewInterfaceReference> new_interface_references =
7109 FindNewIfTableInterfaces(
7110 super_iftable,
7111 super_ifcount,
7112 allocator,
7113 interfaces,
7114 ArrayRef<NewInterfaceReference>(initial_storage),
7115 &supplemental_storage);
7116
7117 // If all declared interfaces were already present in superclass interface table,
7118 // return the interface table from superclass. See above.
7119 if (UNLIKELY(new_interface_references.empty())) {
7120 return super_iftable;
7121 }
7122
7123 // Create the interface table.
7124 size_t ifcount = super_ifcount + new_interface_references.size();
7125 ObjPtr<mirror::IfTable> iftable = AllocIfTable(self, ifcount, super_iftable->GetClass());
7126 if (UNLIKELY(iftable == nullptr)) {
7127 self->AssertPendingOOMException();
7128 return nullptr;
7129 }
7130 // Fill in table with superclass's iftable.
7131 if (super_ifcount != 0) {
7132 // Reload `super_iftable` as it may have been clobbered by the allocation.
7133 super_iftable = klass->GetSuperClass()->GetIfTable();
7134 for (size_t i = 0; i != super_ifcount; i++) {
7135 ObjPtr<mirror::Class> super_interface = super_iftable->GetInterface(i);
7136 DCHECK(super_interface != nullptr);
7137 iftable->SetInterface(i, super_interface);
7138 ObjPtr<mirror::PointerArray> method_array = super_iftable->GetMethodArrayOrNull(i);
7139 if (method_array != nullptr) {
7140 iftable->SetMethodArray(i, method_array);
7141 }
7142 }
7143 }
7144 // Fill in the table with additional interfaces.
7145 size_t current_index = super_ifcount;
7146 for (NewInterfaceReference ref : new_interface_references) {
7147 ObjPtr<mirror::Class> direct_interface = interfaces.GetInterface(ref.direct_interface_index);
7148 ObjPtr<mirror::Class> new_interface = (ref.direct_interface_iftable_index != dex::kDexNoIndex)
7149 ? direct_interface->GetIfTable()->GetInterface(ref.direct_interface_iftable_index)
7150 : direct_interface;
7151 iftable->SetInterface(current_index, new_interface);
7152 ++current_index;
7153 }
7154 DCHECK_EQ(current_index, ifcount);
7155
7156 if (kIsDebugBuild) {
7157 // Check that the iftable is ordered correctly.
7158 for (size_t i = 0; i < ifcount; i++) {
7159 ObjPtr<mirror::Class> if_a = iftable->GetInterface(i);
7160 for (size_t j = i + 1; j < ifcount; j++) {
7161 ObjPtr<mirror::Class> if_b = iftable->GetInterface(j);
7162 // !(if_a <: if_b)
7163 CHECK(!if_b->IsAssignableFrom(if_a))
7164 << "Bad interface order: " << mirror::Class::PrettyClass(if_a) << " (index " << i
7165 << ") extends "
7166 << if_b->PrettyClass() << " (index " << j << ") and so should be after it in the "
7167 << "interface list.";
7168 }
7169 }
7170 }
7171
7172 return iftable;
7173 }
7174
7175 // Check that all vtable entries are present in this class's virtuals or are the same as a
7176 // superclasses vtable entry.
7177 void CheckClassOwnsVTableEntries(Thread* self,
7178 Handle<mirror::Class> klass,
7179 PointerSize pointer_size)
7180 REQUIRES_SHARED(Locks::mutator_lock_) {
7181 StackHandleScope<2> hs(self);
7182 Handle<mirror::PointerArray> check_vtable(hs.NewHandle(klass->GetVTableDuringLinking()));
7183 ObjPtr<mirror::Class> super_temp = (klass->HasSuperClass()) ? klass->GetSuperClass() : nullptr;
7184 Handle<mirror::Class> superclass(hs.NewHandle(super_temp));
7185 int32_t super_vtable_length = (superclass != nullptr) ? superclass->GetVTableLength() : 0;
7186 for (int32_t i = 0; i < check_vtable->GetLength(); ++i) {
7187 ArtMethod* m = check_vtable->GetElementPtrSize<ArtMethod*>(i, pointer_size);
7188 CHECK(m != nullptr);
7189
7190 if (m->GetMethodIndexDuringLinking() != i) {
7191 LOG(WARNING) << m->PrettyMethod()
7192 << " has an unexpected method index for its spot in the vtable for class "
7193 << klass->PrettyClass();
7194 }
7195 ArraySlice<ArtMethod> virtuals = klass->GetVirtualMethodsSliceUnchecked(pointer_size);
7196 auto is_same_method = [m] (const ArtMethod& meth) {
7197 return &meth == m;
7198 };
7199 if (!((super_vtable_length > i && superclass->GetVTableEntry(i, pointer_size) == m) ||
7200 std::find_if(virtuals.begin(), virtuals.end(), is_same_method) != virtuals.end())) {
7201 LOG(WARNING) << m->PrettyMethod() << " does not seem to be owned by current class "
7202 << klass->PrettyClass() << " or any of its superclasses!";
7203 }
7204 }
7205 }
7206
7207 // Check to make sure the vtable does not have duplicates. Duplicates could cause problems when a
7208 // method is overridden in a subclass.
7209 template <PointerSize kPointerSize>
7210 void CheckVTableHasNoDuplicates(Thread* self, Handle<mirror::Class> klass)
7211 REQUIRES_SHARED(Locks::mutator_lock_) {
7212 StackHandleScope<1> hs(self);
7213 Handle<mirror::PointerArray> vtable(hs.NewHandle(klass->GetVTableDuringLinking()));
7214 int32_t num_entries = vtable->GetLength();
7215
7216 // Observations:
7217 // * The older implementation was O(n^2) and got too expensive for apps with larger classes.
7218 // * Many classes do not override Object functions (e.g., equals/hashCode/toString). Thus,
7219 // for many classes outside of libcore a cross-dexfile check has to be run anyway.
7220 // * In the cross-dexfile case, even the O(n^2) algorithm would have had to do O(n) cross
7221 // checks in the best case. It is thus OK for a single-pass algorithm to read all data anyway.
7222 // * The single-pass algorithm will trade memory for speed, but that is OK.
7223
7224 CHECK_GT(num_entries, 0);
7225
7226 auto log_fn = [&vtable, &klass](int32_t i, int32_t j) REQUIRES_SHARED(Locks::mutator_lock_) {
7227 ArtMethod* m1 = vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(i);
7228 ArtMethod* m2 = vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(j);
7229 LOG(WARNING) << "vtable entries " << i << " and " << j << " are identical for "
7230 << klass->PrettyClass() << " in method " << m1->PrettyMethod()
7231 << " (0x" << std::hex << reinterpret_cast<uintptr_t>(m1) << ") and "
7232 << m2->PrettyMethod() << " (0x" << std::hex
7233 << reinterpret_cast<uintptr_t>(m2) << ")";
7234 };
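// Note: `HashCombine` below is the classic boost-style hash_combine step,
// seed ^ (val + 0x9e3779b9 + (seed << 6) + (seed >> 2)), where the constant is
// derived from the golden ratio to help spread bits.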
7235 struct BaseHashType {
7236 static size_t HashCombine(size_t seed, size_t val) {
7237 return seed ^ (val + 0x9e3779b9 + (seed << 6) + (seed >> 2));
7238 }
7239 };
7240
7241 // Check assuming all entries come from the same dex file.
7242 {
7243 // Find the first interesting method and its dex file.
7244 int32_t start = 0;
7245 for (; start < num_entries; ++start) {
7246 ArtMethod* vtable_entry = vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(start);
7247 // Don't bother if we cannot 'see' the vtable entry (e.g., it may be a
7248 // package-private member).
7249 if (!klass->CanAccessMember(vtable_entry->GetDeclaringClass(),
7250 vtable_entry->GetAccessFlags())) {
7251 continue;
7252 }
7253 break;
7254 }
7255 if (start == num_entries) {
7256 return;
7257 }
7258 const DexFile* dex_file =
7259 vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(start)->
7260 GetInterfaceMethodIfProxy(kPointerSize)->GetDexFile();
7261
7262 // Helper function to avoid logging if we have to run the cross-file checks.
7263 auto check_fn = [&](bool log_warn) REQUIRES_SHARED(Locks::mutator_lock_) {
7264 // Use a map to store seen entries, as the storage space is too large for a bitvector.
7265 using PairType = std::pair<uint32_t, uint16_t>;
7266 struct PairHash : BaseHashType {
7267 size_t operator()(const PairType& key) const {
7268 return BaseHashType::HashCombine(BaseHashType::HashCombine(0, key.first), key.second);
7269 }
7270 };
7271 HashMap<PairType, int32_t, DefaultMapEmptyFn<PairType, int32_t>, PairHash> seen;
7272 seen.reserve(2 * num_entries);
7273 bool need_slow_path = false;
7274 bool found_dup = false;
7275 for (int i = start; i < num_entries; ++i) {
7276 // Can use Unchecked here as the start loop already ensured that the arrays are correct
7277 // w.r.t. kPointerSize.
7278 ArtMethod* vtable_entry = vtable->GetElementPtrSizeUnchecked<ArtMethod*, kPointerSize>(i);
7279 if (!klass->CanAccessMember(vtable_entry->GetDeclaringClass(),
7280 vtable_entry->GetAccessFlags())) {
7281 continue;
7282 }
7283 ArtMethod* m = vtable_entry->GetInterfaceMethodIfProxy(kPointerSize);
7284 if (dex_file != m->GetDexFile()) {
7285 need_slow_path = true;
7286 break;
7287 }
7288 const dex::MethodId* m_mid = &dex_file->GetMethodId(m->GetDexMethodIndex());
7289 PairType pair = std::make_pair(m_mid->name_idx_.index_, m_mid->proto_idx_.index_);
7290 auto it = seen.find(pair);
7291 if (it != seen.end()) {
7292 found_dup = true;
7293 if (log_warn) {
7294 log_fn(it->second, i);
7295 }
7296 } else {
7297 seen.insert(std::make_pair(pair, i));
7298 }
7299 }
7300 return std::make_pair(need_slow_path, found_dup);
7301 };
7302 std::pair<bool, bool> result = check_fn(/* log_warn= */ false);
7303 if (!result.first) {
7304 if (result.second) {
7305 check_fn(/* log_warn= */ true);
7306 }
7307 return;
7308 }
7309 }
7310
7311 // Need to check across dex files.
7312 struct Entry {
7313 size_t cached_hash = 0;
7314 uint32_t name_len = 0;
7315 const char* name = nullptr;
7316 Signature signature = Signature::NoSignature();
7317
7318 Entry() = default;
7319 Entry(const Entry& other) = default;
7320 Entry& operator=(const Entry& other) = default;
7321
7322 Entry(const DexFile* dex_file, const dex::MethodId& mid)
7323 : name_len(0), // Explicit to enforce ordering with -Werror,-Wreorder-ctor.
7324 // This call writes `name_len` and it is therefore necessary that the
7325 // initializer for `name_len` comes before it, otherwise the value
7326 // from the call would be overwritten by that initializer.
7327 name(dex_file->GetStringDataAndUtf16Length(mid.name_idx_, &name_len)),
7328 signature(dex_file->GetMethodSignature(mid)) {
7329 // The `name_len` has been initialized to the UTF16 length. Calculate length in bytes.
7330 if (name[name_len] != 0) {
7331 name_len += strlen(name + name_len);
7332 }
7333 }
7334
7335 bool operator==(const Entry& other) const {
7336 return name_len == other.name_len &&
7337 memcmp(name, other.name, name_len) == 0 &&
7338 signature == other.signature;
7339 }
7340 };
7341 struct EntryHash {
7342 size_t operator()(const Entry& key) const {
7343 return key.cached_hash;
7344 }
7345 };
7346 HashMap<Entry, int32_t, DefaultMapEmptyFn<Entry, int32_t>, EntryHash> map;
7347 for (int32_t i = 0; i < num_entries; ++i) {
7348 // Can use Unchecked here as the first loop already ensured that the arrays are correct
7349 // w.r.t. kPointerSize.
7350 ArtMethod* vtable_entry = vtable->GetElementPtrSizeUnchecked<ArtMethod*, kPointerSize>(i);
7351 // Don't bother if we cannot 'see' the vtable entry (e.g., it may be a
7352 // package-private member).
7353 if (!klass->CanAccessMember(vtable_entry->GetDeclaringClass(),
7354 vtable_entry->GetAccessFlags())) {
7355 continue;
7356 }
7357 ArtMethod* m = vtable_entry->GetInterfaceMethodIfProxy(kPointerSize);
7358 const DexFile* dex_file = m->GetDexFile();
7359 const dex::MethodId& mid = dex_file->GetMethodId(m->GetDexMethodIndex());
7360
7361 Entry e(dex_file, mid);
7362
7363 size_t string_hash = std::hash<std::string_view>()(std::string_view(e.name, e.name_len));
7364 size_t sig_hash = std::hash<std::string>()(e.signature.ToString());
7365 e.cached_hash = BaseHashType::HashCombine(BaseHashType::HashCombine(0u, string_hash),
7366 sig_hash);
7367
7368 auto it = map.find(e);
7369 if (it != map.end()) {
7370 log_fn(it->second, i);
7371 } else {
7372 map.insert(std::make_pair(e, i));
7373 }
7374 }
7375 }
7376
7377 void CheckVTableHasNoDuplicates(Thread* self,
7378 Handle<mirror::Class> klass,
7379 PointerSize pointer_size)
7380 REQUIRES_SHARED(Locks::mutator_lock_) {
7381 switch (pointer_size) {
7382 case PointerSize::k64:
7383 CheckVTableHasNoDuplicates<PointerSize::k64>(self, klass);
7384 break;
7385 case PointerSize::k32:
7386 CheckVTableHasNoDuplicates<PointerSize::k32>(self, klass);
7387 break;
7388 }
7389 }
7390
7391 static void CheckVTable(Thread* self, Handle<mirror::Class> klass, PointerSize pointer_size)
7392 REQUIRES_SHARED(Locks::mutator_lock_) {
7393 CheckClassOwnsVTableEntries(self, klass, pointer_size);
7394 CheckVTableHasNoDuplicates(self, klass, pointer_size);
7395 }
7396
7397 } // namespace
7398
7399 template <PointerSize kPointerSize>
7400 class ClassLinker::LinkMethodsHelper {
7401 public:
7402 LinkMethodsHelper(ClassLinker* class_linker,
7403 Handle<mirror::Class> klass,
7404 Thread* self,
7405 Runtime* runtime)
7406 : class_linker_(class_linker),
7407 klass_(klass),
7408 self_(self),
7409 runtime_(runtime),
7410 stack_(runtime->GetArenaPool()),
7411 allocator_(&stack_),
7412 copied_method_records_(copied_method_records_initial_buffer_,
7413 kCopiedMethodRecordInitialBufferSize,
7414 allocator_.Adapter()),
7415 num_new_copied_methods_(0u) {
7416 }
7417
7418 // Links the virtual and interface methods for the given class.
7419 //
7420 // Arguments:
7421 // * self - The current thread.
7422 // * klass - the class whose vtable will be filled in.
7423 // * interfaces - implemented interfaces for a proxy class, otherwise null.
7424 // * out_new_conflict - whether there is a new conflict compared to the superclass.
7425 // * out_imt - interface method table to fill.
7426 bool LinkMethods(
7427 Thread* self,
7428 Handle<mirror::Class> klass,
7429 Handle<mirror::ObjectArray<mirror::Class>> interfaces,
7430 bool* out_new_conflict,
7431 ArtMethod** out_imt)
7432 REQUIRES_SHARED(Locks::mutator_lock_);
7433
7434 private:
7435 // Allocate a pointer array.
7436 static ObjPtr<mirror::PointerArray> AllocPointerArray(Thread* self, size_t length)
7437 REQUIRES_SHARED(Locks::mutator_lock_);
7438
7439 // Allocate method arrays for interfaces.
7440 bool AllocateIfTableMethodArrays(Thread* self,
7441 Handle<mirror::Class> klass,
7442 Handle<mirror::IfTable> iftable)
7443 REQUIRES_SHARED(Locks::mutator_lock_);
7444
7445 // Assign vtable indexes to declared virtual methods for a non-interface class other
7446 // than `java.lang.Object`. Returns the number of vtable entries on success, 0 on failure.
7447 // This function also assigns vtable indexes for interface methods in new interfaces
7448 // and records data for copied methods which shall be referenced by the vtable.
7449 size_t AssignVTableIndexes(ObjPtr<mirror::Class> klass,
7450 ObjPtr<mirror::Class> super_class,
7451 bool is_super_abstract,
7452 size_t num_virtual_methods,
7453 ObjPtr<mirror::IfTable> iftable)
7454 REQUIRES_SHARED(Locks::mutator_lock_);
7455
7456 bool FindCopiedMethodsForInterface(ObjPtr<mirror::Class> klass,
7457 size_t num_virtual_methods,
7458 ObjPtr<mirror::IfTable> iftable)
7459 REQUIRES_SHARED(Locks::mutator_lock_);
7460
7461 bool LinkJavaLangObjectMethods(Thread* self, Handle<mirror::Class> klass)
7462 REQUIRES_SHARED(Locks::mutator_lock_) COLD_ATTR;
7463
7464 void ReallocMethods(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_);
7465 bool FinalizeIfTable(Handle<mirror::Class> klass,
7466 MutableHandle<mirror::IfTable> iftable,
7467 Handle<mirror::PointerArray> vtable,
7468 bool is_klass_abstract,
7469 bool is_super_abstract,
7470 bool* out_new_conflict,
7471 ArtMethod** out_imt)
7472 REQUIRES_SHARED(Locks::mutator_lock_);
7473
7474 void ClobberOldMethods(LengthPrefixedArray<ArtMethod>* old_methods,
7475 LengthPrefixedArray<ArtMethod>* methods) {
7476 if (kIsDebugBuild && old_methods != nullptr) {
7477 CHECK(methods != nullptr);
7478 // Put some random garbage in old methods to help find stale pointers.
7479 if (methods != old_methods) {
7480 // Need to make sure the GC is not running since it could be scanning the methods we are
7481 // about to overwrite.
7482 ScopedThreadStateChange tsc(self_, ThreadState::kSuspended);
7483 gc::ScopedGCCriticalSection gcs(self_,
7484 gc::kGcCauseClassLinker,
7485 gc::kCollectorTypeClassLinker);
7486 const size_t old_size = LengthPrefixedArray<ArtMethod>::ComputeSize(old_methods->size(),
7487 kMethodSize,
7488 kMethodAlignment);
7489 memset(old_methods, 0xFEu, old_size);
7490 // Set size to 0 to avoid visiting declaring classes.
7491 if (gUseUserfaultfd) {
7492 old_methods->SetSize(0);
7493 }
7494 }
7495 }
7496 }
7497
7498 NO_INLINE
7499 void LogNewVirtuals(LengthPrefixedArray<ArtMethod>* methods) const
7500 REQUIRES_SHARED(Locks::mutator_lock_) {
7501 ObjPtr<mirror::Class> klass = klass_.Get();
7502 size_t num_new_copied_methods = num_new_copied_methods_;
7503 size_t old_method_count = methods->size() - num_new_copied_methods;
7504 size_t super_vtable_length = klass->GetSuperClass()->GetVTableLength();
7505 size_t num_miranda_methods = 0u;
7506 size_t num_overriding_default_methods = 0u;
7507 size_t num_default_methods = 0u;
7508 size_t num_overriding_default_conflict_methods = 0u;
7509 size_t num_default_conflict_methods = 0u;
7510 for (size_t i = 0; i != num_new_copied_methods; ++i) {
7511 ArtMethod& m = methods->At(old_method_count + i, kMethodSize, kMethodAlignment);
7512 if (m.IsDefault()) {
7513 if (m.GetMethodIndexDuringLinking() < super_vtable_length) {
7514 ++num_overriding_default_methods;
7515 } else {
7516 ++num_default_methods;
7517 }
7518 } else if (m.IsDefaultConflicting()) {
7519 if (m.GetMethodIndexDuringLinking() < super_vtable_length) {
7520 ++num_overriding_default_conflict_methods;
7521 } else {
7522 ++num_default_conflict_methods;
7523 }
7524 } else {
7525 DCHECK(m.IsMiranda());
7526 ++num_miranda_methods;
7527 }
7528 }
7529 VLOG(class_linker) << klass->PrettyClass() << ": miranda_methods=" << num_miranda_methods
7530 << " default_methods=" << num_default_methods
7531 << " overriding_default_methods=" << num_overriding_default_methods
7532 << " default_conflict_methods=" << num_default_conflict_methods
7533 << " overriding_default_conflict_methods="
7534 << num_overriding_default_conflict_methods;
7535 }
7536
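// Empty-slot policy for hash containers holding method indexes:
// `dex::kDexNoIndex` marks an unused bucket.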
7537 class MethodIndexEmptyFn {
7538 public:
7539 void MakeEmpty(uint32_t& item) const {
7540 item = dex::kDexNoIndex;
7541 }
7542 bool IsEmpty(const uint32_t& item) const {
7543 return item == dex::kDexNoIndex;
7544 }
7545 };
7546
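// Bounds checking for vtable indexes: the debug variant CHECKs every index against
// the vtable length, while the release variant below compiles to a no-op.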
7547 class VTableIndexCheckerDebug {
7548 protected:
7549 explicit VTableIndexCheckerDebug(size_t vtable_length)
7550 : vtable_length_(vtable_length) {}
7551
7552 void CheckIndex(uint32_t index) const {
7553 CHECK_LT(index, vtable_length_);
7554 }
7555
7556 private:
7557 uint32_t vtable_length_;
7558 };
7559
7560 class VTableIndexCheckerRelease {
7561 protected:
7562 explicit VTableIndexCheckerRelease([[maybe_unused]] size_t vtable_length) {}
7563 void CheckIndex([[maybe_unused]] uint32_t index) const {}
7564 };
7565
7566 using VTableIndexChecker =
7567 std::conditional_t<kIsDebugBuild, VTableIndexCheckerDebug, VTableIndexCheckerRelease>;
7568
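// Accessor reading `ArtMethod*` entries directly from raw vtable memory (either a
// pointer array payload or an embedded vtable), using 32-bit or 64-bit loads
// according to `kPointerSize`.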
7569 class VTableAccessor : private VTableIndexChecker {
7570 public:
7571 VTableAccessor(uint8_t* raw_vtable, size_t vtable_length)
7572 REQUIRES_SHARED(Locks::mutator_lock_)
7573 : VTableIndexChecker(vtable_length),
7574 raw_vtable_(raw_vtable) {}
7575
7576 ArtMethod* GetVTableEntry(uint32_t index) const REQUIRES_SHARED(Locks::mutator_lock_) {
7577 this->CheckIndex(index);
7578 uint8_t* entry = raw_vtable_ + static_cast<size_t>(kPointerSize) * index;
7579 if (kPointerSize == PointerSize::k64) {
7580 return reinterpret_cast64<ArtMethod*>(*reinterpret_cast<uint64_t*>(entry));
7581 } else {
7582 return reinterpret_cast32<ArtMethod*>(*reinterpret_cast<uint32_t*>(entry));
7583 }
7584 }
7585
7586 private:
7587 uint8_t* raw_vtable_;
7588 };
7589
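// Hash and equality functors keyed by method signature, allowing a set of vtable
// indexes to be probed either with another index or with an `ArtMethod*`.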
7590 class VTableSignatureHash {
7591 public:
7592 explicit VTableSignatureHash(VTableAccessor accessor)
7593 REQUIRES_SHARED(Locks::mutator_lock_)
7594 : accessor_(accessor) {}
7595
7596 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7597 size_t operator()(ArtMethod* method) const NO_THREAD_SAFETY_ANALYSIS {
7598 return ComputeMethodHash(method);
7599 }
7600
7601 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7602 size_t operator()(uint32_t index) const NO_THREAD_SAFETY_ANALYSIS {
7603 return ComputeMethodHash(accessor_.GetVTableEntry(index));
7604 }
7605
7606 private:
7607 VTableAccessor accessor_;
7608 };
7609
7610 class VTableSignatureEqual {
7611 public:
7612 explicit VTableSignatureEqual(VTableAccessor accessor)
7613 REQUIRES_SHARED(Locks::mutator_lock_)
7614 : accessor_(accessor) {}
7615
7616 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7617 bool operator()(uint32_t lhs_index, ArtMethod* rhs) const NO_THREAD_SAFETY_ANALYSIS {
7618 return MethodSignatureEquals(accessor_.GetVTableEntry(lhs_index), rhs);
7619 }
7620
7621 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7622 bool operator()(uint32_t lhs_index, uint32_t rhs_index) const NO_THREAD_SAFETY_ANALYSIS {
7623 return (*this)(lhs_index, accessor_.GetVTableEntry(rhs_index));
7624 }
7625
7626 private:
7627 VTableAccessor accessor_;
7628 };
7629
7630 using VTableSignatureSet =
7631 ScopedArenaHashSet<uint32_t, MethodIndexEmptyFn, VTableSignatureHash, VTableSignatureEqual>;
7632
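// Functors for a set of indexes into the class's declared virtual methods, also probed
// by signature; proxy methods are resolved to their interface methods before hashing
// and comparison.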
7633 class DeclaredVirtualSignatureHash {
7634 public:
7635 explicit DeclaredVirtualSignatureHash(ObjPtr<mirror::Class> klass)
7636 REQUIRES_SHARED(Locks::mutator_lock_)
7637 : klass_(klass) {}
7638
7639 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7640 size_t operator()(ArtMethod* method) const NO_THREAD_SAFETY_ANALYSIS {
7641 return ComputeMethodHash(method);
7642 }
7643
7644 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7645 size_t operator()(uint32_t index) const NO_THREAD_SAFETY_ANALYSIS {
7646 DCHECK_LT(index, klass_->NumDeclaredVirtualMethods());
7647 ArtMethod* method = klass_->GetVirtualMethodDuringLinking(index, kPointerSize);
7648 return ComputeMethodHash(method->GetInterfaceMethodIfProxy(kPointerSize));
7649 }
7650
7651 private:
7652 ObjPtr<mirror::Class> klass_;
7653 };
7654
7655 class DeclaredVirtualSignatureEqual {
7656 public:
7657 explicit DeclaredVirtualSignatureEqual(ObjPtr<mirror::Class> klass)
7658 REQUIRES_SHARED(Locks::mutator_lock_)
7659 : klass_(klass) {}
7660
7661 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7662 bool operator()(uint32_t lhs_index, ArtMethod* rhs) const NO_THREAD_SAFETY_ANALYSIS {
7663 DCHECK_LT(lhs_index, klass_->NumDeclaredVirtualMethods());
7664 ArtMethod* lhs = klass_->GetVirtualMethodDuringLinking(lhs_index, kPointerSize);
7665 return MethodSignatureEquals(lhs->GetInterfaceMethodIfProxy(kPointerSize), rhs);
7666 }
7667
7668 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7669 bool operator()(uint32_t lhs_index, uint32_t rhs_index) const NO_THREAD_SAFETY_ANALYSIS {
7670 DCHECK_LT(lhs_index, klass_->NumDeclaredVirtualMethods());
7671 DCHECK_LT(rhs_index, klass_->NumDeclaredVirtualMethods());
7672 return lhs_index == rhs_index;
7673 }
7674
7675 private:
7676 ObjPtr<mirror::Class> klass_;
7677 };
7678
7679 using DeclaredVirtualSignatureSet = ScopedArenaHashSet<uint32_t,
7680 MethodIndexEmptyFn,
7681 DeclaredVirtualSignatureHash,
7682 DeclaredVirtualSignatureEqual>;
7683
7684 // Helper class to keep records for determining the correct copied method to create.
7685 class CopiedMethodRecord {
7686 public:
7687 enum class State : uint32_t {
7688 // Note: The `*Single` values are used when we know that there is only one interface
7689 // method with the given signature that's not masked; that method is the main method.
7690 // We use this knowledge for a faster masking check; otherwise we would need to search
7691 // for a masking method through the methods of all interfaces that could potentially mask it.
7692 kAbstractSingle,
7693 kDefaultSingle,
7694 kAbstract,
7695 kDefault,
7696 kDefaultConflict,
7697 kUseSuperMethod,
7698 };
7699
7700 CopiedMethodRecord()
7701 : main_method_(nullptr),
7702 method_index_(0u),
7703 state_(State::kAbstractSingle) {}
7704
7705 CopiedMethodRecord(ArtMethod* main_method, size_t vtable_index)
7706 : main_method_(main_method),
7707 method_index_(vtable_index),
7708 state_(State::kAbstractSingle) {}
7709
7710 // Set the main method. The new main method must be a more specific implementation.
7711 void SetMainMethod(ArtMethod* main_method) {
7712 DCHECK(main_method_ != nullptr);
7713 main_method_ = main_method;
7714 }
7715
7716 // The main method is the first encountered default method if any,
7717 // otherwise the first encountered abstract method.
7718 ArtMethod* GetMainMethod() const {
7719 return main_method_;
7720 }
7721
7722 void SetMethodIndex(size_t method_index) {
7723 DCHECK_NE(method_index, dex::kDexNoIndex);
7724 method_index_ = method_index;
7725 }
7726
7727 size_t GetMethodIndex() const {
7728 DCHECK_NE(method_index_, dex::kDexNoIndex);
7729 return method_index_;
7730 }
7731
7732 void SetState(State state) {
7733 state_ = state;
7734 }
7735
7736 State GetState() const {
7737 return state_;
7738 }
7739
7740 ALWAYS_INLINE
7741 void UpdateStateForInterface(ObjPtr<mirror::Class> iface,
7742 ArtMethod* interface_method,
7743 ObjPtr<mirror::IfTable> iftable,
7744 size_t ifcount,
7745 size_t index)
7746 REQUIRES_SHARED(Locks::mutator_lock_) {
7747 DCHECK_EQ(ifcount, iftable->Count());
7748 DCHECK_LT(index, ifcount);
7749 DCHECK(iface == interface_method->GetDeclaringClass());
7750 DCHECK(iface == iftable->GetInterface(index));
7751 DCHECK(interface_method->IsDefault());
7752 if (GetState() != State::kDefaultConflict) {
7753 DCHECK(GetState() == State::kDefault);
7754 // We do not record all overriding methods, so we need to walk over all
7755 // interfaces that could mask the `interface_method`.
7756 if (ContainsOverridingMethodOf(iftable, index + 1, ifcount, iface, interface_method)) {
7757 return; // Found an overriding method that masks `interface_method`.
7758 }
7759 // We have a new default method that's not masked by any other method.
7760 SetState(State::kDefaultConflict);
7761 }
7762 }
7763
7764 ALWAYS_INLINE
7765 void UpdateState(ObjPtr<mirror::Class> iface,
7766 ArtMethod* interface_method,
7767 size_t vtable_index,
7768 ObjPtr<mirror::IfTable> iftable,
7769 size_t ifcount,
7770 size_t index)
7771 REQUIRES_SHARED(Locks::mutator_lock_) {
7772 DCHECK_EQ(ifcount, iftable->Count());
7773 DCHECK_LT(index, ifcount);
7774 if (kIsDebugBuild) {
7775 if (interface_method->IsCopied()) {
7776 // Called from `FinalizeState()` for a default method from superclass.
7777 // The `index` points to the last interface inherited from the superclass
7778 // as we need to search only the new interfaces for masking methods.
7779 DCHECK(interface_method->IsDefault());
7780 } else {
7781 DCHECK(iface == interface_method->GetDeclaringClass());
7782 DCHECK(iface == iftable->GetInterface(index));
7783 }
7784 }
7785 DCHECK_EQ(vtable_index, method_index_);
7786 auto slow_is_masked = [=]() REQUIRES_SHARED(Locks::mutator_lock_) {
7787 return ContainsImplementingMethod(iftable, index + 1, ifcount, iface, vtable_index);
7788 };
7789 UpdateStateImpl(iface, interface_method, slow_is_masked);
7790 }
7791
7792 ALWAYS_INLINE
7793 void FinalizeState(ArtMethod* super_method,
7794 size_t vtable_index,
7795 ObjPtr<mirror::IfTable> iftable,
7796 size_t ifcount,
7797 ObjPtr<mirror::IfTable> super_iftable,
7798 size_t super_ifcount)
7799 REQUIRES_SHARED(Locks::mutator_lock_) {
7800 DCHECK(super_method->IsCopied());
7801 DCHECK_EQ(vtable_index, method_index_);
7802 DCHECK_EQ(vtable_index, super_method->GetMethodIndex());
7803 DCHECK_NE(super_ifcount, 0u);
7804 if (super_method->IsDefault()) {
7805 if (UNLIKELY(super_method->IsDefaultConflicting())) {
7806 // Some of the default methods that contributed to the conflict in the superclass
7807 // may be masked by new interfaces. Walk over all the interfaces and update state
7808 // as long as the current state is not `kDefaultConflict`.
7809 size_t i = super_ifcount;
7810 while (GetState() != State::kDefaultConflict && i != 0u) {
7811 --i;
7812 ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
7813 DCHECK(iface == super_iftable->GetInterface(i));
7814 auto [found, index] =
7815 MethodArrayContains(super_iftable->GetMethodArrayOrNull(i), super_method);
7816 if (found) {
7817 ArtMethod* interface_method = iface->GetVirtualMethod(index, kPointerSize);
7818 auto slow_is_masked = [=]() REQUIRES_SHARED(Locks::mutator_lock_) {
7819 // Note: The `iftable` has method arrays in range [super_ifcount, ifcount) filled
7820 // with vtable indexes but the range [0, super_ifcount) is empty, so we need to
7821 // use the `super_iftable` filled with implementation methods for that range.
7822 return ContainsImplementingMethod(
7823 super_iftable, i + 1u, super_ifcount, iface, super_method) ||
7824 ContainsImplementingMethod(
7825 iftable, super_ifcount, ifcount, iface, vtable_index);
7826 };
7827 UpdateStateImpl(iface, interface_method, slow_is_masked);
7828 }
7829 }
7830 if (GetState() == State::kDefaultConflict) {
7831 SetState(State::kUseSuperMethod);
7832 }
7833 } else {
7834 // There was exactly one default method in superclass interfaces that was
7835 // not masked by subinterfaces. Use `UpdateState()` to process it and pass
7836 // `super_ifcount - 1` as index for checking if it's been masked by new interfaces.
7837 ObjPtr<mirror::Class> iface = super_method->GetDeclaringClass();
7838 UpdateState(
7839 iface, super_method, vtable_index, iftable, ifcount, /*index=*/ super_ifcount - 1u);
7840 if (GetMainMethod() == super_method) {
7841 DCHECK(GetState() == State::kDefault) << enum_cast<uint32_t>(GetState());
7842 SetState(State::kUseSuperMethod);
7843 }
7844 }
7845 } else {
7846 DCHECK(super_method->IsMiranda());
7847 // Any default methods with this signature in superclass interfaces have been
7848 // masked by subinterfaces. Check if we can reuse the miranda method.
7849 if (GetState() == State::kAbstractSingle || GetState() == State::kAbstract) {
7850 SetState(State::kUseSuperMethod);
7851 }
7852 }
7853 }
7854
7855 private:
7856 template <typename Predicate>
7857 ALWAYS_INLINE
7858 void UpdateStateImpl(ObjPtr<mirror::Class> iface,
7859 ArtMethod* interface_method,
7860 Predicate&& slow_is_masked)
7861 REQUIRES_SHARED(Locks::mutator_lock_) {
7862 bool have_default = false;
7863 switch (GetState()) {
7864 case State::kDefaultSingle:
7865 have_default = true;
7866 FALLTHROUGH_INTENDED;
7867 case State::kAbstractSingle:
7868 if (GetMainMethod()->GetDeclaringClass()->Implements(iface)) {
7869 return; // The main method masks the `interface_method`.
7870 }
7871 if (!interface_method->IsDefault()) {
7872 SetState(have_default ? State::kDefault : State::kAbstract);
7873 return;
7874 }
7875 break;
7876 case State::kDefault:
7877 have_default = true;
7878 FALLTHROUGH_INTENDED;
7879 case State::kAbstract:
7880 if (!interface_method->IsDefault()) {
7881 return; // Keep the same state. We do not need to check for masking.
7882 }
7883 // We do not record all overriding methods, so we need to walk over all
7884 // interfaces that could mask the `interface_method`. The provided
7885 // predicate `slow_is_masked()` does that.
7886 if (slow_is_masked()) {
7887 return; // Found an overriding method that masks `interface_method`.
7888 }
7889 break;
7890 case State::kDefaultConflict:
7891 return; // The state cannot change anymore.
7892 default:
7893 LOG(FATAL) << "Unexpected state: " << enum_cast<uint32_t>(GetState());
7894 UNREACHABLE();
7895 }
7896 // We have a new default method that's not masked by any other method.
7897 DCHECK(interface_method->IsDefault());
7898 if (have_default) {
7899 SetState(State::kDefaultConflict);
7900 } else {
7901 SetMainMethod(interface_method);
7902 SetState(State::kDefault);
7903 }
7904 }
7905
7906 // Determine if the given `iftable` contains in the given range a subinterface of `iface`
7907 // that declares a method with the same name and signature as 'interface_method'.
7908 //
7909 // Arguments
7910 // - iftable: The iftable we are searching for an overriding method.
7911 // - begin: The start of the range to search.
7912 // - end: The end of the range to search.
7913 // - iface: The interface we are checking to see if anything overrides.
7914 // - interface_method:
7915 // The interface method providing a name and signature we're searching for.
7916 //
7917 // Returns whether an overriding method was found in any subinterface of `iface`.
7918 static bool ContainsOverridingMethodOf(ObjPtr<mirror::IfTable> iftable,
7919 size_t begin,
7920 size_t end,
7921 ObjPtr<mirror::Class> iface,
7922 ArtMethod* interface_method)
7923 REQUIRES_SHARED(Locks::mutator_lock_) {
7924 for (size_t i = begin; i != end; ++i) {
7925 ObjPtr<mirror::Class> current_iface = iftable->GetInterface(i);
7926 for (ArtMethod& current_method : current_iface->GetDeclaredVirtualMethods(kPointerSize)) {
7927 if (MethodSignatureEquals(&current_method, interface_method)) {
7928 // Check if the i'th interface is a subtype of this one.
7929 if (current_iface->Implements(iface)) {
7930 return true;
7931 }
7932 break;
7933 }
7934 }
7935 }
7936 return false;
7937 }
7938
7939 // Determine if the given `iftable` contains in the given range a subinterface of `iface`
7940 // that declares a method implemented by 'target'. This is an optimized version of
7941 // `ContainsOverridingMethodOf()` that searches implementation method arrays instead
7942 // of comparing signatures for declared interface methods.
7943 //
7944 // Arguments
7945 // - iftable: The iftable we are searching for an overriding method.
7946 // - begin: The start of the range to search.
7947 // - end: The end of the range to search.
7948 // - iface: The interface we are checking to see if anything overrides.
7949 // - target: The implementation method we're searching for.
7950 // Note that the new `iftable` is filled with vtable indexes for new interfaces,
7951 // so this needs to be the vtable index if we're searching that range.
7952 //
7953 // Returns whether the `target` was found in a method array for any subinterface of `iface`.
7954 template <typename TargetType>
7955 static bool ContainsImplementingMethod(ObjPtr<mirror::IfTable> iftable,
7956 size_t begin,
7957 size_t end,
7958 ObjPtr<mirror::Class> iface,
7959 TargetType target)
7960 REQUIRES_SHARED(Locks::mutator_lock_) {
7961 for (size_t i = begin; i != end; ++i) {
7962 if (MethodArrayContains(iftable->GetMethodArrayOrNull(i), target).first &&
7963 iftable->GetInterface(i)->Implements(iface)) {
7964 return true;
7965 }
7966 }
7967 return false;
7968 }
7969
7970 template <typename TargetType>
7971 static std::pair<bool, size_t> MethodArrayContains(ObjPtr<mirror::PointerArray> method_array,
7972 TargetType target)
7973 REQUIRES_SHARED(Locks::mutator_lock_) {
7974 size_t num_methods = (method_array != nullptr) ? method_array->GetLength() : 0u;
7975 for (size_t j = 0; j != num_methods; ++j) {
7976 if (method_array->GetElementPtrSize<TargetType, kPointerSize>(j) == target) {
7977 return {true, j};
7978 }
7979 }
7980 return {false, 0};
7981 }
7982
7983 ArtMethod* main_method_;
7984 uint32_t method_index_;
7985 State state_;
7986 };
7987
7988 class CopiedMethodRecordEmptyFn {
7989 public:
7990 void MakeEmpty(CopiedMethodRecord& item) const {
7991 item = CopiedMethodRecord();
7992 }
7993 bool IsEmpty(const CopiedMethodRecord& item) const {
7994 return item.GetMainMethod() == nullptr;
7995 }
7996 };
7997
7998 class CopiedMethodRecordHash {
7999 public:
8000 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
8001 size_t operator()(ArtMethod* method) const NO_THREAD_SAFETY_ANALYSIS {
8002 DCHECK(method != nullptr);
8003 return ComputeMethodHash(method);
8004 }
8005
8006 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
8007 size_t operator()(const CopiedMethodRecord& record) const NO_THREAD_SAFETY_ANALYSIS {
8008 return (*this)(record.GetMainMethod());
8009 }
8010 };
8011
8012 class CopiedMethodRecordEqual {
8013 public:
8014 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
8015 bool operator()(const CopiedMethodRecord& lhs_record,
8016 ArtMethod* rhs) const NO_THREAD_SAFETY_ANALYSIS {
8017 ArtMethod* lhs = lhs_record.GetMainMethod();
8018 DCHECK(lhs != nullptr);
8019 DCHECK(rhs != nullptr);
8020 return MethodSignatureEquals(lhs, rhs);
8021 }
8022
8023 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
8024 bool operator()(const CopiedMethodRecord& lhs_record,
8025 const CopiedMethodRecord& rhs_record) const NO_THREAD_SAFETY_ANALYSIS {
8026 return (*this)(lhs_record, rhs_record.GetMainMethod());
8027 }
8028 };
8029
8030 using CopiedMethodRecordSet = ScopedArenaHashSet<CopiedMethodRecord,
8031 CopiedMethodRecordEmptyFn,
8032 CopiedMethodRecordHash,
8033 CopiedMethodRecordEqual>;
8034
8035 static constexpr size_t kMethodAlignment = ArtMethod::Alignment(kPointerSize);
8036 static constexpr size_t kMethodSize = ArtMethod::Size(kPointerSize);
8037
8038 ClassLinker* class_linker_;
8039 Handle<mirror::Class> klass_;
8040 Thread* const self_;
8041 Runtime* const runtime_;
8042
8043 // These are allocated on the heap to begin with; we then transfer them to the linear
8044 // alloc when we re-create the virtual methods array.
8045 // We need to use low-4GB arenas for the compiler, or else the pointers won't fit in the
8046 // 32-bit method array during cross-compilation.
8047 // Use the linear alloc pool since it is in the low 4GB for the compiler.
8048 ArenaStack stack_;
8049 ScopedArenaAllocator allocator_;
8050
8051 // If there are multiple methods with the same signature in the superclass vtable
8052 // (which can happen with a new virtual method having the same signature as an
8053 // inaccessible package-private method from another package in the superclass),
8054 // we keep singly-linked lists in this single array that maps vtable index to the
8055 // next vtable index in the list, `dex::kDexNoIndex` denotes the end of a list.
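// For example, if vtable entries at indexes 3, 7 and 9 share a signature, the declared
// method's index ends up as 9 (the largest), `same_signature_vtable_lists_[9]` is 7,
// `same_signature_vtable_lists_[7]` is 3 and `same_signature_vtable_lists_[3]` is
// `dex::kDexNoIndex`.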
8056 ArrayRef<uint32_t> same_signature_vtable_lists_;
8057
8058 // Avoid large allocation for a few copied method records.
8059 // Keep the initial buffer on the stack to avoid arena allocations
8060 // if there are no special cases (the first arena allocation is costly).
8061 static constexpr size_t kCopiedMethodRecordInitialBufferSize = 16u;
8062 CopiedMethodRecord copied_method_records_initial_buffer_[kCopiedMethodRecordInitialBufferSize];
8063 CopiedMethodRecordSet copied_method_records_;
8064 size_t num_new_copied_methods_;
8065 };
8066
8067 template <PointerSize kPointerSize>
8068 NO_INLINE
8069 void ClassLinker::LinkMethodsHelper<kPointerSize>::ReallocMethods(ObjPtr<mirror::Class> klass) {
8070 // There should be no thread suspension in this function; native allocations do not
8071 // cause thread suspension.
8072 ScopedAssertNoThreadSuspension sants(__FUNCTION__);
8073
8074 size_t num_new_copied_methods = num_new_copied_methods_;
8075 DCHECK_NE(num_new_copied_methods, 0u);
8076 const size_t old_method_count = klass->NumMethods();
8077 const size_t new_method_count = old_method_count + num_new_copied_methods;
8078
8079 // Attempt to realloc to save RAM if possible.
8080 LengthPrefixedArray<ArtMethod>* old_methods = klass->GetMethodsPtr();
8081 // The realloc'ed virtual methods aren't visible from the class roots, so there is no issue
8082 // where GCs could attempt to mark stale pointers due to memcpy. And since we overwrite the
8083 // realloc'ed memory with out->CopyFrom, we are guaranteed to have to-space references,
8084 // as CopyFrom has internal read barriers.
8085 //
8086 // TODO We should maybe move some of this into mirror::Class or at least into another method.
8087 const size_t old_size = LengthPrefixedArray<ArtMethod>::ComputeSize(old_method_count,
8088 kMethodSize,
8089 kMethodAlignment);
8090 const size_t new_size = LengthPrefixedArray<ArtMethod>::ComputeSize(new_method_count,
8091 kMethodSize,
8092 kMethodAlignment);
8093 const size_t old_methods_ptr_size = (old_methods != nullptr) ? old_size : 0;
8094 LinearAlloc* allocator = class_linker_->GetAllocatorForClassLoader(klass->GetClassLoader());
8095 auto* methods = reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(allocator->Realloc(
8096 self_, old_methods, old_methods_ptr_size, new_size, LinearAllocKind::kArtMethodArray));
8097 CHECK(methods != nullptr); // Native allocation failure aborts.
8098
8099 if (methods != old_methods) {
8100 if (gUseReadBarrier) {
8101 StrideIterator<ArtMethod> out = methods->begin(kMethodSize, kMethodAlignment);
8102 // Copy over the old methods. The `ArtMethod::CopyFrom()` is only necessary to not miss
8103 // read barriers since `LinearAlloc::Realloc()` won't do read barriers when it copies.
8104 for (auto& m : klass->GetMethods(kPointerSize)) {
8105 out->CopyFrom(&m, kPointerSize);
8106 ++out;
8107 }
8108 } else if (gUseUserfaultfd) {
8109 // In order to make compaction code skip updating the declaring_class_ in
8110 // old_methods, convert it into a 'no GC-root' array.
8111 allocator->ConvertToNoGcRoots(old_methods, LinearAllocKind::kArtMethodArray);
8112 }
8113 }
8114
8115 // Collect and sort copied method records by the vtable index. This places overriding
8116 // copied methods first, sorted by the vtable index already assigned in the superclass,
8117 // followed by copied methods with new signatures in the order in which we encountered
8118 // them when going over virtual methods of new interfaces.
8119 // This order is deterministic but implementation-defined.
8120 //
8121 // Avoid arena allocation for a few records (the first arena allocation is costly).
8122 constexpr size_t kSortedRecordsBufferSize = 16;
8123 CopiedMethodRecord* sorted_records_buffer[kSortedRecordsBufferSize];
8124 CopiedMethodRecord** sorted_records = (num_new_copied_methods <= kSortedRecordsBufferSize)
8125 ? sorted_records_buffer
8126 : allocator_.AllocArray<CopiedMethodRecord*>(num_new_copied_methods);
8127 size_t filled_sorted_records = 0u;
8128 for (CopiedMethodRecord& record : copied_method_records_) {
8129 if (record.GetState() != CopiedMethodRecord::State::kUseSuperMethod) {
8130 DCHECK_LT(filled_sorted_records, num_new_copied_methods);
8131 sorted_records[filled_sorted_records] = &record;
8132 ++filled_sorted_records;
8133 }
8134 }
8135 DCHECK_EQ(filled_sorted_records, num_new_copied_methods);
8136 std::sort(sorted_records,
8137 sorted_records + num_new_copied_methods,
8138 [](const CopiedMethodRecord* lhs, const CopiedMethodRecord* rhs) {
8139 return lhs->GetMethodIndex() < rhs->GetMethodIndex();
8140 });
8141
8142 if (klass->IsInterface()) {
8143 // Some records may have been pruned. Update method indexes in collected records.
8144 size_t interface_method_index = klass->NumDeclaredVirtualMethods();
8145 for (size_t i = 0; i != num_new_copied_methods; ++i) {
8146 CopiedMethodRecord* record = sorted_records[i];
8147 DCHECK_LE(interface_method_index, record->GetMethodIndex());
8148 record->SetMethodIndex(interface_method_index);
8149 ++interface_method_index;
8150 }
8151 }
8152
8153 // Add copied methods.
8154 methods->SetSize(new_method_count);
8155 for (size_t i = 0; i != num_new_copied_methods; ++i) {
8156 const CopiedMethodRecord* record = sorted_records[i];
8157 ArtMethod* interface_method = record->GetMainMethod();
8158 DCHECK(!interface_method->IsCopied());
8159 ArtMethod& new_method = methods->At(old_method_count + i, kMethodSize, kMethodAlignment);
8160 new_method.CopyFrom(interface_method, kPointerSize);
8161 new_method.SetMethodIndex(dchecked_integral_cast<uint16_t>(record->GetMethodIndex()));
8162 switch (record->GetState()) {
8163 case CopiedMethodRecord::State::kAbstractSingle:
8164 case CopiedMethodRecord::State::kAbstract: {
8165 DCHECK(!klass->IsInterface()); // We do not create miranda methods for interfaces.
8166 uint32_t access_flags = new_method.GetAccessFlags();
8167 DCHECK_EQ(access_flags & (kAccAbstract | kAccIntrinsic | kAccDefault), kAccAbstract)
8168 << "Miranda method should be abstract but not intrinsic or default!";
8169 new_method.SetAccessFlags(access_flags | kAccCopied);
8170 break;
8171 }
8172 case CopiedMethodRecord::State::kDefaultSingle:
8173 case CopiedMethodRecord::State::kDefault: {
8174 DCHECK(!klass->IsInterface()); // We do not copy default methods for interfaces.
8175 // Clear the kAccSkipAccessChecks flag if it is present. Since this class hasn't been
8176 // verified yet, it shouldn't have methods that skip access checks.
8177 // TODO This is rather arbitrary. We should maybe support classes where only some of its
8178 // methods are skip_access_checks.
8179 DCHECK_EQ(new_method.GetAccessFlags() & kAccNative, 0u);
8180 constexpr uint32_t kSetFlags = kAccDefault | kAccCopied;
8181 constexpr uint32_t kMaskFlags = ~kAccSkipAccessChecks;
8182 new_method.SetAccessFlags((new_method.GetAccessFlags() | kSetFlags) & kMaskFlags);
8183 break;
8184 }
8185 case CopiedMethodRecord::State::kDefaultConflict: {
8186 // This is a type of default method (there are default method impls, just a conflict),
8187 // so mark this as a default method. We use the `kAccAbstract` flag to distinguish it from
8188 // an invokable copied default method without using a separate access flag, but the default
8189 // conflicting method is technically not abstract and ArtMethod::IsAbstract() shall
8190 // return false. Also clear the kAccSkipAccessChecks bit; since this class hasn't been
8191 // verified yet, it shouldn't have methods that skip access checks. Also clear the
8192 // potential kAccSingleImplementation flag to avoid CHA trying to inline the default method.
8193 uint32_t access_flags = new_method.GetAccessFlags();
8194 DCHECK_EQ(access_flags & (kAccNative | kAccIntrinsic), 0u);
8195 constexpr uint32_t kSetFlags = kAccDefault | kAccAbstract | kAccCopied;
8196 constexpr uint32_t kMaskFlags = ~(kAccSkipAccessChecks | kAccSingleImplementation);
8197 new_method.SetAccessFlags((access_flags | kSetFlags) & kMaskFlags);
8198 new_method.SetDataPtrSize(nullptr, kPointerSize);
8199 DCHECK(new_method.IsDefaultConflicting());
8200 DCHECK(!new_method.IsAbstract());
8201 // The actual method might or might not be marked abstract since we just copied it from
8202 // a (possibly default) interface method. We need to set its entry point to be the bridge
8203 // so that the compiler will not invoke the implementation of whatever method we copied
8204 // from.
8205 EnsureThrowsInvocationError(class_linker_, &new_method);
8206 break;
8207 }
8208 default:
8209 LOG(FATAL) << "Unexpected state: " << enum_cast<uint32_t>(record->GetState());
8210 UNREACHABLE();
8211 }
8212 }
8213
8214 if (VLOG_IS_ON(class_linker)) {
8215 LogNewVirtuals(methods);
8216 }
8217
8218 class_linker_->UpdateClassMethods(klass, methods);
8219 }
8220
8221 template <PointerSize kPointerSize>
8222 bool ClassLinker::LinkMethodsHelper<kPointerSize>::FinalizeIfTable(
8223 Handle<mirror::Class> klass,
8224 MutableHandle<mirror::IfTable> iftable,
8225 Handle<mirror::PointerArray> vtable,
8226 bool is_klass_abstract,
8227 bool is_super_abstract,
8228 bool* out_new_conflict,
8229 ArtMethod** out_imt) {
8230 size_t ifcount = iftable->Count();
8231 // We do not need a read barrier here as the length is constant, both from-space and
8232 // to-space `IfTable`s shall yield the same result. See also `Class::GetIfTableCount()`.
8233 size_t super_ifcount =
8234 klass->GetSuperClass<kDefaultVerifyFlags, kWithoutReadBarrier>()->GetIfTableCount();
8235
8236 ClassLinker* class_linker = nullptr;
8237 ArtMethod* unimplemented_method = nullptr;
8238 ArtMethod* imt_conflict_method = nullptr;
8239 uintptr_t imt_methods_begin = 0u;
8240 size_t imt_methods_size = 0u;
8241 DCHECK_EQ(klass->ShouldHaveImt(), !is_klass_abstract);
8242 DCHECK_EQ(klass->GetSuperClass()->ShouldHaveImt(), !is_super_abstract);
8243 if (!is_klass_abstract) {
8244 class_linker = class_linker_;
8245 unimplemented_method = runtime_->GetImtUnimplementedMethod();
8246 imt_conflict_method = runtime_->GetImtConflictMethod();
8247 if (is_super_abstract) {
8248 // There was no IMT in superclass to copy to `out_imt[]`, so we need
8249 // to fill it with all implementation methods from superclass.
8250 DCHECK_EQ(imt_methods_begin, 0u);
8251 imt_methods_size = std::numeric_limits<size_t>::max(); // No method at the last byte.
8252 } else {
8253 // If the superclass has IMT, we have already copied it to `out_imt[]` and
8254 // we do not need to call `SetIMTRef()` for interfaces from superclass when
8255 // the implementation method is already in the superclass, only for new methods.
8256 // For simplicity, use the entire method array including direct methods.
8257 LengthPrefixedArray<ArtMethod>* const new_methods = klass->GetMethodsPtr();
8258 if (new_methods != nullptr) {
8259 DCHECK_NE(new_methods->size(), 0u);
8260 imt_methods_begin =
8261 reinterpret_cast<uintptr_t>(&new_methods->At(0, kMethodSize, kMethodAlignment));
8262 imt_methods_size = new_methods->size() * kMethodSize;
8263 }
8264 }
8265 }
8266
8267 auto update_imt = [=](ObjPtr<mirror::Class> iface, size_t j, ArtMethod* implementation)
8268 REQUIRES_SHARED(Locks::mutator_lock_) {
8269 // Place method in imt if entry is empty, place conflict otherwise.
8270 ArtMethod** imt_ptr = &out_imt[iface->GetVirtualMethod(j, kPointerSize)->GetImtIndex()];
8271 class_linker->SetIMTRef(unimplemented_method,
8272 imt_conflict_method,
8273 implementation,
8274 /*out*/out_new_conflict,
8275 /*out*/imt_ptr);
8276 };
8277
8278 // For interfaces inherited from superclass, the new method arrays are empty,
8279 // so use vtable indexes from implementation methods from the superclass method array.
8280 for (size_t i = 0; i != super_ifcount; ++i) {
8281 ObjPtr<mirror::PointerArray> method_array = iftable->GetMethodArrayOrNull(i);
8282 DCHECK(method_array == klass->GetSuperClass()->GetIfTable()->GetMethodArrayOrNull(i));
8283 if (method_array == nullptr) {
8284 continue;
8285 }
8286 size_t num_methods = method_array->GetLength();
8287 ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
8288 size_t j = 0;
8289 // First loop has method array shared with the super class.
8290 for (; j != num_methods; ++j) {
8291 ArtMethod* super_implementation =
8292 method_array->GetElementPtrSize<ArtMethod*, kPointerSize>(j);
8293 size_t vtable_index = super_implementation->GetMethodIndex();
8294 ArtMethod* implementation =
8295 vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(vtable_index);
8296 // Check if we need to update IMT with this method, see above.
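// The unsigned subtraction makes this a single range check: with `imt_methods_begin`
// at 0 and `imt_methods_size` at SIZE_MAX it accepts (almost) any method (superclass
// without IMT), otherwise it accepts only this class's own method array entries.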
8297 if (reinterpret_cast<uintptr_t>(implementation) - imt_methods_begin < imt_methods_size) {
8298 update_imt(iface, j, implementation);
8299 }
8300 if (implementation != super_implementation) {
8301 // Copy-on-write and move to the next loop.
8302 Thread* self = self_;
8303 StackHandleScope<2u> hs(self);
8304 Handle<mirror::PointerArray> old_method_array = hs.NewHandle(method_array);
8305 HandleWrapperObjPtr<mirror::Class> h_iface = hs.NewHandleWrapper(&iface);
8306 if (ifcount == super_ifcount && iftable.Get() == klass->GetSuperClass()->GetIfTable()) {
8307 ObjPtr<mirror::IfTable> new_iftable = ObjPtr<mirror::IfTable>::DownCast(
8308 mirror::ObjectArray<mirror::Object>::CopyOf(
8309 iftable, self, ifcount * mirror::IfTable::kMax));
8310 if (new_iftable == nullptr) {
8311 return false;
8312 }
8313 iftable.Assign(new_iftable);
8314 }
8315 method_array = ObjPtr<mirror::PointerArray>::DownCast(
8316 mirror::Array::CopyOf(old_method_array, self, num_methods));
8317 if (method_array == nullptr) {
8318 return false;
8319 }
8320 iftable->SetMethodArray(i, method_array);
8321 method_array->SetElementPtrSize(j, implementation, kPointerSize);
8322 ++j;
8323 break;
8324 }
8325 }
8326 // Second loop (if non-empty) has method array different from the superclass.
8327 for (; j != num_methods; ++j) {
8328 ArtMethod* super_implementation =
8329 method_array->GetElementPtrSize<ArtMethod*, kPointerSize>(j);
8330 size_t vtable_index = super_implementation->GetMethodIndex();
8331 ArtMethod* implementation =
8332 vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(vtable_index);
8333 method_array->SetElementPtrSize(j, implementation, kPointerSize);
8334 // Check if we need to update IMT with this method, see above.
8335 if (reinterpret_cast<uintptr_t>(implementation) - imt_methods_begin < imt_methods_size) {
8336 update_imt(iface, j, implementation);
8337 }
8338 }
8339 }
8340
8341 // New interface method arrays contain vtable indexes. Translate them to methods.
8342 DCHECK_EQ(klass->ShouldHaveImt(), !is_klass_abstract);
8343 for (size_t i = super_ifcount; i != ifcount; ++i) {
8344 ObjPtr<mirror::PointerArray> method_array = iftable->GetMethodArrayOrNull(i);
8345 if (method_array == nullptr) {
8346 continue;
8347 }
8348 size_t num_methods = method_array->GetLength();
8349 ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
8350 for (size_t j = 0; j != num_methods; ++j) {
8351 size_t vtable_index = method_array->GetElementPtrSize<size_t, kPointerSize>(j);
8352 ArtMethod* implementation =
8353 vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(vtable_index);
8354 method_array->SetElementPtrSize(j, implementation, kPointerSize);
8355 if (!is_klass_abstract) {
8356 update_imt(iface, j, implementation);
8357 }
8358 }
8359 }
8360
8361 return true;
8362 }
8363
8364 template <PointerSize kPointerSize>
8365 ObjPtr<mirror::PointerArray> ClassLinker::LinkMethodsHelper<kPointerSize>::AllocPointerArray(
8366 Thread* self, size_t length) {
8367 using PointerArrayType = std::conditional_t<
8368 kPointerSize == PointerSize::k64, mirror::LongArray, mirror::IntArray>;
8369 ObjPtr<mirror::Array> array = PointerArrayType::Alloc(self, length);
8370 return ObjPtr<mirror::PointerArray>::DownCast(array);
8371 }
8372
8373 template <PointerSize kPointerSize>
8374 bool ClassLinker::LinkMethodsHelper<kPointerSize>::AllocateIfTableMethodArrays(
8375 Thread* self,
8376 Handle<mirror::Class> klass,
8377 Handle<mirror::IfTable> iftable) {
8378 DCHECK(!klass->IsInterface());
8379 DCHECK(klass_->HasSuperClass());
8380 const size_t ifcount = iftable->Count();
8381 // We do not need a read barrier here as the length is constant, both from-space and
8382 // to-space `IfTable`s shall yield the same result. See also `Class::GetIfTableCount()`.
8383 size_t super_ifcount =
8384 klass->GetSuperClass<kDefaultVerifyFlags, kWithoutReadBarrier>()->GetIfTableCount();
8385 if (ifcount == super_ifcount) {
8386 DCHECK(iftable.Get() == klass_->GetSuperClass()->GetIfTable());
8387 return true;
8388 }
8389
8390 if (kIsDebugBuild) {
8391 // The method array references for superclass interfaces have been copied.
8392 // We shall allocate new arrays if needed (copy-on-write) in `FinalizeIfTable()`.
8393 ObjPtr<mirror::IfTable> super_iftable = klass_->GetSuperClass()->GetIfTable();
8394 for (size_t i = 0; i != super_ifcount; ++i) {
8395 CHECK(iftable->GetInterface(i) == super_iftable->GetInterface(i));
8396 CHECK(iftable->GetMethodArrayOrNull(i) == super_iftable->GetMethodArrayOrNull(i));
8397 }
8398 }
8399
8400 for (size_t i = super_ifcount; i < ifcount; ++i) {
8401 size_t num_methods = iftable->GetInterface(i)->NumDeclaredVirtualMethods();
8402 if (num_methods > 0) {
8403 ObjPtr<mirror::PointerArray> method_array = AllocPointerArray(self, num_methods);
8404 if (UNLIKELY(method_array == nullptr)) {
8405 self->AssertPendingOOMException();
8406 return false;
8407 }
8408 iftable->SetMethodArray(i, method_array);
8409 }
8410 }
8411 return true;
8412 }
8413
8414 template <PointerSize kPointerSize>
8415 size_t ClassLinker::LinkMethodsHelper<kPointerSize>::AssignVTableIndexes(
8416 ObjPtr<mirror::Class> klass,
8417 ObjPtr<mirror::Class> super_class,
8418 bool is_super_abstract,
8419 size_t num_virtual_methods,
8420 ObjPtr<mirror::IfTable> iftable) {
8421 DCHECK(!klass->IsInterface());
8422 DCHECK(klass->HasSuperClass());
8423 DCHECK(klass->GetSuperClass() == super_class);
8424
8425 // There should be no thread suspension unless we want to throw an exception.
8426 // (We are using `ObjPtr<>` and raw vtable pointers that are invalidated by thread suspension.)
8427 std::optional<ScopedAssertNoThreadSuspension> sants(__FUNCTION__);
8428
8429 // Prepare a hash table with virtual methods from the superclass.
8430 // For the unlikely cases that there are multiple methods with the same signature
8431 // but different vtable indexes, keep an array with indexes of the previous
8432 // methods with the same signature (walked as singly-linked lists).
8433 uint8_t* raw_super_vtable;
8434 size_t super_vtable_length;
8435 if (is_super_abstract) {
8436 DCHECK(!super_class->ShouldHaveEmbeddedVTable());
8437 ObjPtr<mirror::PointerArray> super_vtable = super_class->GetVTableDuringLinking();
8438 DCHECK(super_vtable != nullptr);
8439 raw_super_vtable = reinterpret_cast<uint8_t*>(super_vtable.Ptr()) +
8440 mirror::Array::DataOffset(static_cast<size_t>(kPointerSize)).Uint32Value();
8441 super_vtable_length = super_vtable->GetLength();
8442 } else {
8443 DCHECK(super_class->ShouldHaveEmbeddedVTable());
8444 raw_super_vtable = reinterpret_cast<uint8_t*>(super_class.Ptr()) +
8445 mirror::Class::EmbeddedVTableOffset(kPointerSize).Uint32Value();
8446 super_vtable_length = super_class->GetEmbeddedVTableLength();
8447 }
8448 VTableAccessor super_vtable_accessor(raw_super_vtable, super_vtable_length);
8449 static constexpr double kMinLoadFactor = 0.3;
8450 static constexpr double kMaxLoadFactor = 0.5;
8451 static constexpr size_t kMaxStackBufferSize = 256;
8452 const size_t declared_virtuals_buffer_size = num_virtual_methods * 3;
8453 const size_t super_vtable_buffer_size = super_vtable_length * 3;
8454 const size_t bit_vector_size = BitVector::BitsToWords(num_virtual_methods);
8455 const size_t total_size =
8456 declared_virtuals_buffer_size + super_vtable_buffer_size + bit_vector_size;
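// Note: the buffers are sized at 3x the entry count, so inserting all entries keeps the
// hash sets' load factor around 0.33, within the [0.3, 0.5] bounds set above; this
// should avoid any rehashing into the arena.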
8457
8458 uint32_t* declared_virtuals_buffer_ptr = (total_size <= kMaxStackBufferSize)
8459 ? reinterpret_cast<uint32_t*>(alloca(total_size * sizeof(uint32_t)))
8460 : allocator_.AllocArray<uint32_t>(total_size);
8461 uint32_t* bit_vector_buffer_ptr = declared_virtuals_buffer_ptr + declared_virtuals_buffer_size;
8462
8463 DeclaredVirtualSignatureSet declared_virtual_signatures(
8464 kMinLoadFactor,
8465 kMaxLoadFactor,
8466 DeclaredVirtualSignatureHash(klass),
8467 DeclaredVirtualSignatureEqual(klass),
8468 declared_virtuals_buffer_ptr,
8469 declared_virtuals_buffer_size,
8470 allocator_.Adapter());
8471
8472 ArrayRef<uint32_t> same_signature_vtable_lists;
8473 const bool is_proxy_class = klass->IsProxyClass();
8474 size_t vtable_length = super_vtable_length;
8475
8476 // Record which declared methods are overriding a super method.
8477 BitVector initialized_methods(/* expandable= */ false,
8478 Allocator::GetNoopAllocator(),
8479 bit_vector_size,
8480 bit_vector_buffer_ptr);
8481
8482 // Note: our sets hash on the method name, and therefore we pay a high
8483 // performance price when a class has many overloads.
8484 //
8485 // We populate a set of declared signatures instead of signatures from the
8486 // super vtable (which is only lazy populated in case of interface overriding,
8487 // see below). This makes sure that we pay the performance price only on that
8488 // class, and not on its subclasses (except in the case of interface overriding, see below).
8489 for (size_t i = 0; i != num_virtual_methods; ++i) {
8490 ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
8491 DCHECK(!virtual_method->IsStatic()) << virtual_method->PrettyMethod();
8492 ArtMethod* signature_method = UNLIKELY(is_proxy_class)
8493 ? virtual_method->GetInterfaceMethodForProxyUnchecked(kPointerSize)
8494 : virtual_method;
8495 size_t hash = ComputeMethodHash(signature_method);
8496 declared_virtual_signatures.PutWithHash(i, hash);
8497 }
8498
8499 // Loop through each super vtable method and see if they are overridden by a method we added to
8500 // the hash table.
8501 for (size_t j = 0; j < super_vtable_length; ++j) {
8502 // Search the hash table to see if we are overridden by any method.
8503 ArtMethod* super_method = super_vtable_accessor.GetVTableEntry(j);
8504 if (!klass->CanAccessMember(super_method->GetDeclaringClass(),
8505 super_method->GetAccessFlags())) {
8506 // Continue on to the next method since this one is package private and cannot be overridden.
8507 // Before Android 4.1, the package-private method super_method might have been incorrectly
8508 // overridden.
8509 continue;
8510 }
8511 size_t hash = (j < mirror::Object::kVTableLength)
8512 ? class_linker_->object_virtual_method_hashes_[j]
8513 : ComputeMethodHash(super_method);
8514 auto it = declared_virtual_signatures.FindWithHash(super_method, hash);
8515 if (it == declared_virtual_signatures.end()) {
8516 continue;
8517 }
8518 ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(*it, kPointerSize);
8519 if (super_method->IsFinal()) {
8520 sants.reset();
8521 ThrowLinkageError(klass, "Method %s overrides final method in class %s",
8522 virtual_method->PrettyMethod().c_str(),
8523 super_method->GetDeclaringClassDescriptor());
8524 return 0u;
8525 }
8526 if (initialized_methods.IsBitSet(*it)) {
8527 // The method is overriding more than one method.
8528 // We record that information in a linked list to later set the method in the vtable
8529 // locations that are not the method index.
8530 if (same_signature_vtable_lists.empty()) {
8531 same_signature_vtable_lists = ArrayRef<uint32_t>(
8532 allocator_.AllocArray<uint32_t>(super_vtable_length), super_vtable_length);
8533 std::fill_n(same_signature_vtable_lists.data(), super_vtable_length, dex::kDexNoIndex);
8534 same_signature_vtable_lists_ = same_signature_vtable_lists;
8535 }
8536 same_signature_vtable_lists[j] = virtual_method->GetMethodIndexDuringLinking();
8537 } else {
8538 initialized_methods.SetBit(*it);
8539 }
8540
8541 // We arbitrarily set the method index to the largest overridden vtable index. This is
8542 // also expected when iterating over the `same_signature_vtable_lists_`.
8543 virtual_method->SetMethodIndex(j);
8544 }
8545
8546 // Add the non-overridden methods at the end.
8547 for (size_t i = 0; i < num_virtual_methods; ++i) {
8548 if (!initialized_methods.IsBitSet(i)) {
8549 ArtMethod* local_method = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
8550 local_method->SetMethodIndex(vtable_length);
8551 vtable_length++;
8552 }
8553 }
8554
8555 // A lazily constructed super vtable set, which we only populate in the less
8556 // common situation of a superclass implementing a method declared in an
8557 // interface this class inherits.
8558 // We still try to allocate the set on the stack as using the arena will have
8559 // a larger cost.
8560 uint32_t* super_vtable_buffer_ptr = bit_vector_buffer_ptr + bit_vector_size;
8561 VTableSignatureSet super_vtable_signatures(
8562 kMinLoadFactor,
8563 kMaxLoadFactor,
8564 VTableSignatureHash(super_vtable_accessor),
8565 VTableSignatureEqual(super_vtable_accessor),
8566 super_vtable_buffer_ptr,
8567 super_vtable_buffer_size,
8568 allocator_.Adapter());
8569
8570 // Assign vtable indexes for interface methods in new interfaces and store them
8571 // in implementation method arrays. These shall be replaced by actual method
8572 // pointers later. We do not need to do this for superclass interfaces as we can
8573 // get these vtable indexes from implementation methods in superclass iftable.
8574 // Record data for copied methods which shall be referenced by the vtable.
8575 const size_t ifcount = iftable->Count();
8576 ObjPtr<mirror::IfTable> super_iftable = super_class->GetIfTable();
8577 const size_t super_ifcount = super_iftable->Count();
8578 for (size_t i = ifcount; i != super_ifcount; ) {
8579 --i;
8580 DCHECK_LT(i, ifcount);
8581 ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
8582 ObjPtr<mirror::PointerArray> method_array = iftable->GetMethodArrayOrNull(i);
8583 size_t num_methods = (method_array != nullptr) ? method_array->GetLength() : 0u;
8584 for (size_t j = 0; j != num_methods; ++j) {
8585 ArtMethod* interface_method = iface->GetVirtualMethod(j, kPointerSize);
8586 size_t hash = ComputeMethodHash(interface_method);
8587 ArtMethod* vtable_method = nullptr;
8588 auto it1 = declared_virtual_signatures.FindWithHash(interface_method, hash);
8589 if (it1 != declared_virtual_signatures.end()) {
8590 ArtMethod* found_method = klass->GetVirtualMethodDuringLinking(*it1, kPointerSize);
8591 // For interface overriding, we only look at public methods.
8592 if (found_method->IsPublic()) {
8593 vtable_method = found_method;
8594 }
8595 } else {
8596 // This situation should be rare (a superclass implements a method
8597 // declared in an interface this class is inheriting). Only in this case
8598 // do we lazily populate the super_vtable_signatures.
8599 if (super_vtable_signatures.empty()) {
8600 for (size_t k = 0; k < super_vtable_length; ++k) {
8601 ArtMethod* super_method = super_vtable_accessor.GetVTableEntry(k);
8602 if (!super_method->IsPublic()) {
8603 // For interface overriding, we only look at public methods.
8604 continue;
8605 }
8606 size_t super_hash = (k < mirror::Object::kVTableLength)
8607 ? class_linker_->object_virtual_method_hashes_[k]
8608 : ComputeMethodHash(super_method);
8609 auto [it, inserted] = super_vtable_signatures.InsertWithHash(k, super_hash);
8610 DCHECK(inserted || super_vtable_accessor.GetVTableEntry(*it) == super_method);
8611 }
8612 }
8613 auto it2 = super_vtable_signatures.FindWithHash(interface_method, hash);
8614 if (it2 != super_vtable_signatures.end()) {
8615 vtable_method = super_vtable_accessor.GetVTableEntry(*it2);
8616 }
8617 }
8618
8619 uint32_t vtable_index = vtable_length;
8620 if (vtable_method != nullptr) {
8621 vtable_index = vtable_method->GetMethodIndexDuringLinking();
8622 if (!vtable_method->IsOverridableByDefaultMethod()) {
8623 method_array->SetElementPtrSize(j, vtable_index, kPointerSize);
8624 continue;
8625 }
8626 }
8627
8628 auto [it, inserted] = copied_method_records_.InsertWithHash(
8629 CopiedMethodRecord(interface_method, vtable_index), hash);
8630 if (vtable_method != nullptr) {
8631 DCHECK_EQ(vtable_index, it->GetMethodIndex());
8632 } else if (inserted) {
8633 DCHECK_EQ(vtable_index, it->GetMethodIndex());
8634 DCHECK_EQ(vtable_index, vtable_length);
8635 ++vtable_length;
8636 } else {
8637 vtable_index = it->GetMethodIndex();
8638 }
8639 method_array->SetElementPtrSize(j, it->GetMethodIndex(), kPointerSize);
8640 if (inserted) {
8641 it->SetState(interface_method->IsAbstract() ? CopiedMethodRecord::State::kAbstractSingle
8642 : CopiedMethodRecord::State::kDefaultSingle);
8643 } else {
8644 it->UpdateState(iface, interface_method, vtable_index, iftable, ifcount, i);
8645 }
8646 }
8647 }
8648 // Finalize copied method records and check if we can reuse some methods from superclass vtable.
8649 size_t num_new_copied_methods = copied_method_records_.size();
8650 for (CopiedMethodRecord& record : copied_method_records_) {
8651 uint32_t vtable_index = record.GetMethodIndex();
8652 if (vtable_index < super_vtable_length) {
8653 ArtMethod* super_method = super_vtable_accessor.GetVTableEntry(record.GetMethodIndex());
8654 DCHECK(super_method->IsOverridableByDefaultMethod());
8655 record.FinalizeState(
8656 super_method, vtable_index, iftable, ifcount, super_iftable, super_ifcount);
8657 if (record.GetState() == CopiedMethodRecord::State::kUseSuperMethod) {
8658 --num_new_copied_methods;
8659 }
8660 }
8661 }
8662 num_new_copied_methods_ = num_new_copied_methods;
8663
8664 if (UNLIKELY(!IsUint<16>(vtable_length))) {
8665 sants.reset();
8666 ThrowClassFormatError(klass, "Too many methods defined on class: %zd", vtable_length);
8667 return 0u;
8668 }
8669
8670 return vtable_length;
8671 }
8672
8673 template <PointerSize kPointerSize>
8674 bool ClassLinker::LinkMethodsHelper<kPointerSize>::FindCopiedMethodsForInterface(
8675 ObjPtr<mirror::Class> klass,
8676 size_t num_virtual_methods,
8677 ObjPtr<mirror::IfTable> iftable) {
8678 DCHECK(klass->IsInterface());
8679 DCHECK(klass->HasSuperClass());
8680 DCHECK(klass->GetSuperClass()->IsObjectClass());
8681 DCHECK_EQ(klass->GetSuperClass()->GetIfTableCount(), 0);
8682
8683 // There should be no thread suspension unless we want to throw an exception.
8684 // (We are using `ObjPtr<>`s that are invalidated by thread suspension.)
8685 std::optional<ScopedAssertNoThreadSuspension> sants(__FUNCTION__);
8686
8687 // Prepare a `HashSet<>` with the declared virtual methods. These mask any methods
8688 // from superinterfaces, so we can filter out matching superinterface methods.
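// Note: the buffer below is sized at 3x the number of entries so the load factor
// stays well under `kMaxLoadFactor`; small buffers are carved out of the stack
// via `alloca()` to avoid the cost of an arena allocation.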
8689 static constexpr double kMinLoadFactor = 0.3;
8690 static constexpr double kMaxLoadFactor = 0.5;
8691 static constexpr size_t kMaxStackBufferSize = 256;
8692 const size_t declared_virtuals_buffer_size = num_virtual_methods * 3;
8693 uint32_t* declared_virtuals_buffer_ptr = (declared_virtuals_buffer_size <= kMaxStackBufferSize)
8694 ? reinterpret_cast<uint32_t*>(alloca(declared_virtuals_buffer_size * sizeof(uint32_t)))
8695 : allocator_.AllocArray<uint32_t>(declared_virtuals_buffer_size);
8696 DeclaredVirtualSignatureSet declared_virtual_signatures(
8697 kMinLoadFactor,
8698 kMaxLoadFactor,
8699 DeclaredVirtualSignatureHash(klass),
8700 DeclaredVirtualSignatureEqual(klass),
8701 declared_virtuals_buffer_ptr,
8702 declared_virtuals_buffer_size,
8703 allocator_.Adapter());
8704 for (size_t i = 0; i != num_virtual_methods; ++i) {
8705 ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
8706 DCHECK(!virtual_method->IsStatic()) << virtual_method->PrettyMethod();
8707 size_t hash = ComputeMethodHash(virtual_method);
8708 declared_virtual_signatures.PutWithHash(i, hash);
8709 }
8710
8711 // We do not create miranda methods for interface classes, so we do not need to track
8712 // non-default (abstract) interface methods. The downside is that we cannot use the
8713 // optimized code paths with `CopiedMethodRecord::State::kDefaultSingle` and since
8714 // we do not fill method arrays for interfaces, the method search actually has to
8715 // compare signatures instead of searching for the implementing method.
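// Illustrative example: if interface J extends I, and I declares a default method
// m() that J neither redeclares nor brings into conflict with another default,
// then no copy of m() is created for J; callers locate it later through
// `Class::FindVirtualMethodForInterfaceSuper()`. Only genuine default conflicts
// lead to copied methods in an interface.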
8716 const size_t ifcount = iftable->Count();
8717 size_t new_method_index = num_virtual_methods;
8718 for (size_t i = ifcount; i != 0u; ) {
8719 --i;
8720 DCHECK_LT(i, ifcount);
8721 ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
8722 if (!iface->HasDefaultMethods()) {
8723 continue; // No default methods to process.
8724 }
8725 size_t num_methods = iface->NumDeclaredVirtualMethods();
8726 for (size_t j = 0; j != num_methods; ++j) {
8727 ArtMethod* interface_method = iface->GetVirtualMethod(j, kPointerSize);
8728 if (!interface_method->IsDefault()) {
8729 continue; // Do not process this non-default method.
8730 }
8731 size_t hash = ComputeMethodHash(interface_method);
8732 auto it1 = declared_virtual_signatures.FindWithHash(interface_method, hash);
8733 if (it1 != declared_virtual_signatures.end()) {
8734 ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(*it1, kPointerSize);
8735 if (!virtual_method->IsAbstract() && !virtual_method->IsPublic()) {
8736 sants.reset();
8737 ThrowIllegalAccessErrorForImplementingMethod(klass, virtual_method, interface_method);
8738 return false;
8739 }
8740 continue; // This default method is masked by a method declared in this interface.
8741 }
8742
8743 CopiedMethodRecord new_record(interface_method, new_method_index);
8744 auto it = copied_method_records_.FindWithHash(new_record, hash);
8745 if (it == copied_method_records_.end()) {
8746 // Pretend that there is another default method and try to update the state.
8747 // If the `interface_method` is not masked, the state shall change to
8748 // `kDefaultConflict`; if it is masked, the state remains `kDefault`.
8749 new_record.SetState(CopiedMethodRecord::State::kDefault);
8750 new_record.UpdateStateForInterface(iface, interface_method, iftable, ifcount, i);
8751 if (new_record.GetState() == CopiedMethodRecord::State::kDefaultConflict) {
8752 // Insert the new record with the state `kDefault`.
8753 new_record.SetState(CopiedMethodRecord::State::kDefault);
8754 copied_method_records_.PutWithHash(new_record, hash);
8755 DCHECK_EQ(new_method_index, new_record.GetMethodIndex());
8756 ++new_method_index;
8757 }
8758 } else {
8759 it->UpdateStateForInterface(iface, interface_method, iftable, ifcount, i);
8760 }
8761 }
8762 }
8763
8764 // Prune records without conflict. (Method indexes are updated in `ReallocMethods()`.)
8765 // We do not copy normal default methods to subinterfaces, instead we find the
8766 // default method with `Class::FindVirtualMethodForInterfaceSuper()` when needed.
8767 size_t num_new_copied_methods = copied_method_records_.size();
8768 for (CopiedMethodRecord& record : copied_method_records_) {
8769 if (record.GetState() != CopiedMethodRecord::State::kDefaultConflict) {
8770 DCHECK(record.GetState() == CopiedMethodRecord::State::kDefault);
8771 record.SetState(CopiedMethodRecord::State::kUseSuperMethod);
8772 --num_new_copied_methods;
8773 }
8774 }
8775 num_new_copied_methods_ = num_new_copied_methods;
8776
8777 return true;
8778 }
8779
8780
8781 template <PointerSize kPointerSize>
8782 FLATTEN
8783 bool ClassLinker::LinkMethodsHelper<kPointerSize>::LinkMethods(
8784 Thread* self,
8785 Handle<mirror::Class> klass,
8786 Handle<mirror::ObjectArray<mirror::Class>> interfaces,
8787 bool* out_new_conflict,
8788 ArtMethod** out_imt) {
8789 const size_t num_virtual_methods = klass->NumVirtualMethods();
8790 if (klass->IsInterface()) {
8791 // No vtable.
8792 if (!IsUint<16>(num_virtual_methods)) {
8793 ThrowClassFormatError(klass.Get(), "Too many methods on interface: %zu", num_virtual_methods);
8794 return false;
8795 }
8796 // Assign each method an interface table index and set the default flag.
8797 bool has_defaults = false;
8798 for (size_t i = 0; i < num_virtual_methods; ++i) {
8799 ArtMethod* m = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
8800 m->SetMethodIndex(i);
8801 uint32_t access_flags = m->GetAccessFlags();
8802 DCHECK(!ArtMethod::IsDefault(access_flags));
8803 DCHECK_EQ(!ArtMethod::IsAbstract(access_flags), ArtMethod::IsInvokable(access_flags));
8804 if (ArtMethod::IsInvokable(access_flags)) {
8805 // If the dex file does not support default methods, throw ClassFormatError.
8806 // This check is necessary to protect from odd cases, such as native default
8807 // methods, that the dex file verifier permits for old dex file versions. b/157170505
8808 // FIXME: This should be `if (!m->GetDexFile()->SupportsDefaultMethods())` but we're
8809 // currently running CTS tests for default methods with dex file version 035 which
8810 // does not support default methods. So, we limit this to native methods. b/157718952
8811 if (ArtMethod::IsNative(access_flags)) {
8812 DCHECK(!m->GetDexFile()->SupportsDefaultMethods());
8813 ThrowClassFormatError(klass.Get(),
8814 "Dex file does not support default method '%s'",
8815 m->PrettyMethod().c_str());
8816 return false;
8817 }
8818 if (!ArtMethod::IsPublic(access_flags)) {
8819 // The verifier should have caught the non-public method for dex version 37.
8820 // Just warn and skip it since this is from before default-methods so we don't
8821 // really need to care that it has code.
8822 LOG(WARNING) << "Default interface method " << m->PrettyMethod() << " is not public! "
8823 << "This will be a fatal error in subsequent versions of android. "
8824 << "Continuing anyway.";
8825 }
8826 m->SetAccessFlags(access_flags | kAccDefault);
8827 has_defaults = true;
8828 }
8829 }
8830 // Mark that we have default methods so that we won't need to scan the virtual_methods_ array
8831 // during initialization. This is a performance optimization. We could simply traverse the
8832 // virtual_methods_ array again during initialization.
8833 if (has_defaults) {
8834 klass->SetHasDefaultMethods();
8835 }
8836 ObjPtr<mirror::IfTable> iftable = SetupInterfaceLookupTable(
8837 self, klass, &allocator_, NonProxyInterfacesAccessor(class_linker_, klass));
8838 if (UNLIKELY(iftable == nullptr)) {
8839 self->AssertPendingException();
8840 return false;
8841 }
8842 size_t ifcount = iftable->Count();
8843 bool have_super_with_defaults = false;
8844 for (size_t i = 0; i != ifcount; ++i) {
8845 if (iftable->GetInterface(i)->HasDefaultMethods()) {
8846 have_super_with_defaults = true;
8847 break;
8848 }
8849 }
8850 LengthPrefixedArray<ArtMethod>* old_methods = kIsDebugBuild ? klass->GetMethodsPtr() : nullptr;
8851 if (have_super_with_defaults) {
8852 if (!FindCopiedMethodsForInterface(klass.Get(), num_virtual_methods, iftable)) {
8853 self->AssertPendingException();
8854 return false;
8855 }
8856 if (num_new_copied_methods_ != 0u) {
8857 // Re-check the number of methods.
8858 size_t final_num_virtual_methods = num_virtual_methods + num_new_copied_methods_;
8859 if (!IsUint<16>(final_num_virtual_methods)) {
8860 ThrowClassFormatError(
8861 klass.Get(), "Too many methods on interface: %zu", final_num_virtual_methods);
8862 return false;
8863 }
8864 ReallocMethods(klass.Get());
8865 }
8866 }
8867 klass->SetIfTable(iftable);
8868 if (kIsDebugBuild) {
8869 // May cause thread suspension, so do this after we're done with `ObjPtr<> iftable`.
8870 ClobberOldMethods(old_methods, klass->GetMethodsPtr());
8871 }
8872 return true;
8873 } else if (LIKELY(klass->HasSuperClass())) {
8874 // We set up the interface lookup table now because we need it to determine if we need
8875 // to update any vtable entries with new default method implementations.
8876 StackHandleScope<3> hs(self);
8877 MutableHandle<mirror::IfTable> iftable = hs.NewHandle(UNLIKELY(klass->IsProxyClass())
8878 ? SetupInterfaceLookupTable(self, klass, &allocator_, ProxyInterfacesAccessor(interfaces))
8879 : SetupInterfaceLookupTable(
8880 self, klass, &allocator_, NonProxyInterfacesAccessor(class_linker_, klass)));
8881 if (UNLIKELY(iftable == nullptr)) {
8882 self->AssertPendingException();
8883 return false;
8884 }
8885
8886 // Copy the IMT from superclass if present and needed. Update with new methods later.
8887 Handle<mirror::Class> super_class = hs.NewHandle(klass->GetSuperClass());
8888 bool is_klass_abstract = klass->IsAbstract();
8889 bool is_super_abstract = super_class->IsAbstract();
8890 DCHECK_EQ(klass->ShouldHaveImt(), !is_klass_abstract);
8891 DCHECK_EQ(super_class->ShouldHaveImt(), !is_super_abstract);
8892 if (!is_klass_abstract && !is_super_abstract) {
8893 ImTable* super_imt = super_class->GetImt(kPointerSize);
8894 for (size_t i = 0; i < ImTable::kSize; ++i) {
8895 out_imt[i] = super_imt->Get(i, kPointerSize);
8896 }
8897 }
8898
8899 // If there are no new virtual methods and no new interfaces, we can simply reuse
8900 // the vtable from superclass. We may need to make a copy if it's embedded.
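// (An embedded vtable is stored inside the `Class` object itself for instantiable
// classes and therefore cannot be shared by pointer; only an abstract superclass
// keeps a separate `PointerArray` vtable that we can install directly.)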
8901 const size_t super_vtable_length = super_class->GetVTableLength();
8902 if (num_virtual_methods == 0 && iftable.Get() == super_class->GetIfTable()) {
8903 DCHECK_EQ(is_super_abstract, !super_class->ShouldHaveEmbeddedVTable());
8904 if (is_super_abstract) {
8905 DCHECK(super_class->IsAbstract() && !super_class->IsArrayClass());
8906 ObjPtr<mirror::PointerArray> super_vtable = super_class->GetVTable();
8907 CHECK(super_vtable != nullptr) << super_class->PrettyClass();
8908 klass->SetVTable(super_vtable);
8909 // No IMT in the super class, we need to reconstruct it from the iftable.
8910 if (!is_klass_abstract && iftable->Count() != 0) {
8911 class_linker_->FillIMTFromIfTable(iftable.Get(),
8912 runtime_->GetImtUnimplementedMethod(),
8913 runtime_->GetImtConflictMethod(),
8914 klass.Get(),
8915 /*create_conflict_tables=*/false,
8916 /*ignore_copied_methods=*/false,
8917 out_new_conflict,
8918 out_imt);
8919 }
8920 } else {
8921 ObjPtr<mirror::PointerArray> vtable = AllocPointerArray(self, super_vtable_length);
8922 if (UNLIKELY(vtable == nullptr)) {
8923 self->AssertPendingOOMException();
8924 return false;
8925 }
8926 for (size_t i = 0; i < super_vtable_length; i++) {
8927 vtable->SetElementPtrSize(
8928 i, super_class->GetEmbeddedVTableEntry(i, kPointerSize), kPointerSize);
8929 }
8930 klass->SetVTable(vtable);
8931 // The IMT was already copied from superclass if `klass` is not abstract.
8932 }
8933 klass->SetIfTable(iftable.Get());
8934 return true;
8935 }
8936
8937 // Allocate method arrays, so that we can link interface methods without thread suspension,
8938 // otherwise GC could miss visiting newly allocated copied methods.
8939 // TODO: Do not allocate copied methods during linking, store only records about what
8940 // we need to allocate and allocate it at the end. Start with superclass iftable and
8941 // perform copy-on-write when needed to facilitate maximum memory sharing.
8942 if (!AllocateIfTableMethodArrays(self, klass, iftable)) {
8943 self->AssertPendingOOMException();
8944 return false;
8945 }
8946
8947 size_t final_vtable_size = AssignVTableIndexes(
8948 klass.Get(), super_class.Get(), is_super_abstract, num_virtual_methods, iftable.Get());
8949 if (final_vtable_size == 0u) {
8950 self->AssertPendingException();
8951 return false;
8952 }
8953 DCHECK(IsUint<16>(final_vtable_size));
8954
8955 // Allocate the new vtable.
8956 Handle<mirror::PointerArray> vtable = hs.NewHandle(AllocPointerArray(self, final_vtable_size));
8957 if (UNLIKELY(vtable == nullptr)) {
8958 self->AssertPendingOOMException();
8959 return false;
8960 }
8961
8962 LengthPrefixedArray<ArtMethod>* old_methods = kIsDebugBuild ? klass->GetMethodsPtr() : nullptr;
8963 if (num_new_copied_methods_ != 0u) {
8964 ReallocMethods(klass.Get());
8965 }
8966
8967 // Store new virtual methods in the new vtable.
8968 ArrayRef<uint32_t> same_signature_vtable_lists = same_signature_vtable_lists_;
8969 for (ArtMethod& virtual_method : klass->GetVirtualMethodsSliceUnchecked(kPointerSize)) {
8970 uint32_t vtable_index = virtual_method.GetMethodIndexDuringLinking();
8971 vtable->SetElementPtrSize(vtable_index, &virtual_method, kPointerSize);
8972 if (UNLIKELY(vtable_index < same_signature_vtable_lists.size())) {
8973 // We may override more than one method according to JLS, see b/211854716.
8974 while (same_signature_vtable_lists[vtable_index] != dex::kDexNoIndex) {
8975 DCHECK_LT(same_signature_vtable_lists[vtable_index], vtable_index);
8976 vtable_index = same_signature_vtable_lists[vtable_index];
8977 vtable->SetElementPtrSize(vtable_index, &virtual_method, kPointerSize);
8978 if (kIsDebugBuild) {
8979 ArtMethod* current_method = super_class->GetVTableEntry(vtable_index, kPointerSize);
8980 DCHECK(klass->CanAccessMember(current_method->GetDeclaringClass(),
8981 current_method->GetAccessFlags()));
8982 DCHECK(!current_method->IsFinal());
8983 }
8984 }
8985 }
8986 }
8987
8988 // For non-overridden vtable slots, copy a method from `super_class`.
8989 for (size_t j = 0; j != super_vtable_length; ++j) {
8990 if (vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(j) == nullptr) {
8991 ArtMethod* super_method = super_class->GetVTableEntry(j, kPointerSize);
8992 vtable->SetElementPtrSize(j, super_method, kPointerSize);
8993 }
8994 }
8995
8996 // Update the `iftable` (and IMT) with finalized virtual methods.
8997 if (!FinalizeIfTable(klass,
8998 iftable,
8999 vtable,
9000 is_klass_abstract,
9001 is_super_abstract,
9002 out_new_conflict,
9003 out_imt)) {
9004 self->AssertPendingOOMException();
9005 return false;
9006 }
9007
9008 klass->SetVTable(vtable.Get());
9009 klass->SetIfTable(iftable.Get());
9010 if (kIsDebugBuild) {
9011 CheckVTable(self, klass, kPointerSize);
9012 ClobberOldMethods(old_methods, klass->GetMethodsPtr());
9013 }
9014 return true;
9015 } else {
9016 return LinkJavaLangObjectMethods(self, klass);
9017 }
9018 }
9019
9020 template <PointerSize kPointerSize>
9021 bool ClassLinker::LinkMethodsHelper<kPointerSize>::LinkJavaLangObjectMethods(
9022 Thread* self,
9023 Handle<mirror::Class> klass) {
9024 DCHECK_EQ(klass.Get(), GetClassRoot<mirror::Object>(class_linker_));
9025 DCHECK_EQ(klass->NumVirtualMethods(), mirror::Object::kVTableLength);
9026 static_assert(IsUint<16>(mirror::Object::kVTableLength));
9027 ObjPtr<mirror::PointerArray> vtable = AllocPointerArray(self, mirror::Object::kVTableLength);
9028 if (UNLIKELY(vtable == nullptr)) {
9029 self->AssertPendingOOMException();
9030 return false;
9031 }
9032 for (size_t i = 0; i < mirror::Object::kVTableLength; ++i) {
9033 ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
9034 vtable->SetElementPtrSize(i, virtual_method, kPointerSize);
9035 virtual_method->SetMethodIndex(i);
9036 }
9037 klass->SetVTable(vtable);
9038 InitializeObjectVirtualMethodHashes(
9039 klass.Get(),
9040 kPointerSize,
9041 ArrayRef<uint32_t>(class_linker_->object_virtual_method_hashes_));
9042 // The interface table is already allocated but there are no interface methods to link.
9043 DCHECK(klass->GetIfTable() != nullptr);
9044 DCHECK_EQ(klass->GetIfTableCount(), 0);
9045 return true;
9046 }
9047
9048 // Populate the class vtable and itable. Compute return type indices.
9049 bool ClassLinker::LinkMethods(Thread* self,
9050 Handle<mirror::Class> klass,
9051 Handle<mirror::ObjectArray<mirror::Class>> interfaces,
9052 bool* out_new_conflict,
9053 ArtMethod** out_imt) {
9054 self->AllowThreadSuspension();
9055 // Link virtual methods then interface methods.
9056 Runtime* const runtime = Runtime::Current();
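// The image pointer size only differs from the runtime pointer size during
// cross-compilation (e.g. dex2oat targeting a different word size), so dispatch
// to the matching template instantiation of the helper.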
9057 if (LIKELY(GetImagePointerSize() == kRuntimePointerSize)) {
9058 LinkMethodsHelper<kRuntimePointerSize> helper(this, klass, self, runtime);
9059 return helper.LinkMethods(self, klass, interfaces, out_new_conflict, out_imt);
9060 } else {
9061 constexpr PointerSize kOtherPointerSize =
9062 (kRuntimePointerSize == PointerSize::k64) ? PointerSize::k32 : PointerSize::k64;
9063 LinkMethodsHelper<kOtherPointerSize> helper(this, klass, self, runtime);
9064 return helper.LinkMethods(self, klass, interfaces, out_new_conflict, out_imt);
9065 }
9066 }
9067
9068 class ClassLinker::LinkFieldsHelper {
9069 public:
9070 static bool LinkFields(ClassLinker* class_linker,
9071 Thread* self,
9072 Handle<mirror::Class> klass,
9073 bool is_static,
9074 size_t* class_size)
9075 REQUIRES_SHARED(Locks::mutator_lock_);
9076
9077 private:
9078 enum class FieldTypeOrder : uint16_t;
9079 class FieldGaps;
9080
9081 struct FieldTypeOrderAndIndex {
9082 FieldTypeOrder field_type_order;
9083 uint16_t field_index;
9084 };
9085
9086 static FieldTypeOrder FieldTypeOrderFromFirstDescriptorCharacter(char first_char);
9087
9088 template <size_t kSize>
9089 static MemberOffset AssignFieldOffset(ArtField* field, MemberOffset field_offset)
9090 REQUIRES_SHARED(Locks::mutator_lock_);
9091 };
9092
9093 // We use the following order of field types for assigning offsets.
9094 // Some fields can be shuffled forward to fill gaps, see
9095 // `ClassLinker::LinkFieldsHelper::LinkFields()`.
9096 enum class ClassLinker::LinkFieldsHelper::FieldTypeOrder : uint16_t {
9097 kReference = 0u,
9098 kLong,
9099 kDouble,
9100 kInt,
9101 kFloat,
9102 kChar,
9103 kShort,
9104 kBoolean,
9105 kByte,
9106
9107 kLast64BitType = kDouble,
9108 kLast32BitType = kFloat,
9109 kLast16BitType = kShort,
9110 };
9111
9112 ALWAYS_INLINE
9113 ClassLinker::LinkFieldsHelper::FieldTypeOrder
9114 ClassLinker::LinkFieldsHelper::FieldTypeOrderFromFirstDescriptorCharacter(char first_char) {
9115 switch (first_char) {
9116 case 'J':
9117 return FieldTypeOrder::kLong;
9118 case 'D':
9119 return FieldTypeOrder::kDouble;
9120 case 'I':
9121 return FieldTypeOrder::kInt;
9122 case 'F':
9123 return FieldTypeOrder::kFloat;
9124 case 'C':
9125 return FieldTypeOrder::kChar;
9126 case 'S':
9127 return FieldTypeOrder::kShort;
9128 case 'Z':
9129 return FieldTypeOrder::kBoolean;
9130 case 'B':
9131 return FieldTypeOrder::kByte;
9132 default:
9133 DCHECK(first_char == 'L' || first_char == '[') << first_char;
9134 return FieldTypeOrder::kReference;
9135 }
9136 }
9137
9138 // Gaps where we can insert fields in object layout.
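// Alignment padding can leave at most one open gap of each size (1, 2 and 4 bytes)
// at any time: a gap of a given size is consumed, or split into smaller gaps,
// before another gap of that size can be recorded (see the DCHECKs in `AddGaps()`).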
9139 class ClassLinker::LinkFieldsHelper::FieldGaps {
9140 public:
9141 template <uint32_t kSize>
9142 ALWAYS_INLINE MemberOffset AlignFieldOffset(MemberOffset field_offset) {
9143 static_assert(kSize == 2u || kSize == 4u || kSize == 8u);
9144 if (!IsAligned<kSize>(field_offset.Uint32Value())) {
9145 uint32_t gap_start = field_offset.Uint32Value();
9146 field_offset = MemberOffset(RoundUp(gap_start, kSize));
9147 AddGaps<kSize - 1u>(gap_start, field_offset.Uint32Value());
9148 }
9149 return field_offset;
9150 }
9151
9152 template <uint32_t kSize>
9153 bool HasGap() const {
9154 static_assert(kSize == 1u || kSize == 2u || kSize == 4u);
9155 return (kSize == 1u && gap1_offset_ != kNoOffset) ||
9156 (kSize <= 2u && gap2_offset_ != kNoOffset) ||
9157 gap4_offset_ != kNoOffset;
9158 }
9159
9160 template <uint32_t kSize>
9161 MemberOffset ReleaseGap() {
9162 static_assert(kSize == 1u || kSize == 2u || kSize == 4u);
9163 uint32_t result;
9164 if (kSize == 1u && gap1_offset_ != kNoOffset) {
9165 DCHECK(gap2_offset_ == kNoOffset || gap2_offset_ > gap1_offset_);
9166 DCHECK(gap4_offset_ == kNoOffset || gap4_offset_ > gap1_offset_);
9167 result = gap1_offset_;
9168 gap1_offset_ = kNoOffset;
9169 } else if (kSize <= 2u && gap2_offset_ != kNoOffset) {
9170 DCHECK(gap4_offset_ == kNoOffset || gap4_offset_ > gap2_offset_);
9171 result = gap2_offset_;
9172 gap2_offset_ = kNoOffset;
9173 if (kSize < 2u) {
9174 AddGaps<1u>(result + kSize, result + 2u);
9175 }
9176 } else {
9177 DCHECK_NE(gap4_offset_, kNoOffset);
9178 result = gap4_offset_;
9179 gap4_offset_ = kNoOffset;
9180 if (kSize < 4u) {
9181 AddGaps<kSize | 2u>(result + kSize, result + 4u);
9182 }
9183 }
9184 return MemberOffset(result);
9185 }
9186
9187 private:
9188 template <uint32_t kGapsToCheck>
9189 void AddGaps(uint32_t gap_start, uint32_t gap_end) {
9190 if ((kGapsToCheck & 1u) != 0u) {
9191 DCHECK_LT(gap_start, gap_end);
9192 DCHECK_ALIGNED(gap_end, 2u);
9193 if ((gap_start & 1u) != 0u) {
9194 DCHECK_EQ(gap1_offset_, kNoOffset);
9195 gap1_offset_ = gap_start;
9196 gap_start += 1u;
9197 if (kGapsToCheck == 1u || gap_start == gap_end) {
9198 DCHECK_EQ(gap_start, gap_end);
9199 return;
9200 }
9201 }
9202 }
9203
9204 if ((kGapsToCheck & 2u) != 0u) {
9205 DCHECK_LT(gap_start, gap_end);
9206 DCHECK_ALIGNED(gap_start, 2u);
9207 DCHECK_ALIGNED(gap_end, 4u);
9208 if ((gap_start & 2u) != 0u) {
9209 DCHECK_EQ(gap2_offset_, kNoOffset);
9210 gap2_offset_ = gap_start;
9211 gap_start += 2u;
9212 if (kGapsToCheck <= 3u || gap_start == gap_end) {
9213 DCHECK_EQ(gap_start, gap_end);
9214 return;
9215 }
9216 }
9217 }
9218
9219 if ((kGapsToCheck & 4u) != 0u) {
9220 DCHECK_LT(gap_start, gap_end);
9221 DCHECK_ALIGNED(gap_start, 4u);
9222 DCHECK_ALIGNED(gap_end, 8u);
9223 DCHECK_EQ(gap_start + 4u, gap_end);
9224 DCHECK_EQ(gap4_offset_, kNoOffset);
9225 gap4_offset_ = gap_start;
9226 return;
9227 }
9228
9229 DCHECK(false) << "Remaining gap: " << gap_start << " to " << gap_end
9230 << " after checking " << kGapsToCheck;
9231 }
9232
9233 static constexpr uint32_t kNoOffset = static_cast<uint32_t>(-1);
9234
9235 uint32_t gap4_offset_ = kNoOffset;
9236 uint32_t gap2_offset_ = kNoOffset;
9237 uint32_t gap1_offset_ = kNoOffset;
9238 };
9239
9240 template <size_t kSize>
9241 ALWAYS_INLINE
9242 MemberOffset ClassLinker::LinkFieldsHelper::AssignFieldOffset(ArtField* field,
9243 MemberOffset field_offset) {
9244 DCHECK_ALIGNED(field_offset.Uint32Value(), kSize);
9245 DCHECK_EQ(Primitive::ComponentSize(field->GetTypeAsPrimitiveType()), kSize);
9246 field->SetOffset(field_offset);
9247 return MemberOffset(field_offset.Uint32Value() + kSize);
9248 }
9249
9250 bool ClassLinker::LinkFieldsHelper::LinkFields(ClassLinker* class_linker,
9251 Thread* self,
9252 Handle<mirror::Class> klass,
9253 bool is_static,
9254 size_t* class_size) {
9255 self->AllowThreadSuspension();
9256 const size_t num_fields = is_static ? klass->NumStaticFields() : klass->NumInstanceFields();
9257 LengthPrefixedArray<ArtField>* const fields = is_static ? klass->GetSFieldsPtr() :
9258 klass->GetIFieldsPtr();
9259
9260 // Initialize field_offset
9261 MemberOffset field_offset(0);
9262 if (is_static) {
9263 field_offset = klass->GetFirstReferenceStaticFieldOffsetDuringLinking(
9264 class_linker->GetImagePointerSize());
9265 } else {
9266 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
9267 if (super_class != nullptr) {
9268 CHECK(super_class->IsResolved())
9269 << klass->PrettyClass() << " " << super_class->PrettyClass();
9270 field_offset = MemberOffset(super_class->GetObjectSize());
9271 }
9272 }
9273
9274 CHECK_EQ(num_fields == 0, fields == nullptr) << klass->PrettyClass();
9275
9276 // We want a relatively stable order so that adding new fields
9277 // minimizes disruption of the corresponding C++ classes such as Class and Method.
9278 //
9279 // The overall sort order is:
9280 // 1) All object reference fields, sorted alphabetically.
9281 // 2) All java long (64-bit) integer fields, sorted alphabetically.
9282 // 3) All java double (64-bit) floating point fields, sorted alphabetically.
9283 // 4) All java int (32-bit) integer fields, sorted alphabetically.
9284 // 5) All java float (32-bit) floating point fields, sorted alphabetically.
9285 // 6) All java char (16-bit) integer fields, sorted alphabetically.
9286 // 7) All java short (16-bit) integer fields, sorted alphabetically.
9287 // 8) All java boolean (8-bit) integer fields, sorted alphabetically.
9288 // 9) All java byte (8-bit) integer fields, sorted alphabetically.
9289 //
9290 // (References are first to increase the chance of reference visiting
9291 // being able to take a fast path using a bitmap of references at the
9292 // start of the object, see `Class::reference_instance_offsets_`.)
9293 //
9294 // Once the fields are sorted in this order we will attempt to fill any gaps
9295 // that might be present in the memory layout of the structure.
9296 // Note that we shall not fill gaps between the superclass fields.
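// Hypothetical example (illustrative only): with instance fields `long j; int i;
// byte b;` and a superclass object size of 12, aligning for the 64-bit block
// records a 4-byte gap at offset 12 and places `j` at offset 16; the 32-bit pass
// then fills the gap by placing `i` at offset 12, and `b` lands at offset 24,
// giving an object size of 25 with no wasted padding between fields.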
9297
9298 // Collect fields and their "type order index" (see numbered points above).
9299 const char* old_no_suspend_cause = self->StartAssertNoThreadSuspension(
9300 "Using plain ArtField references");
9301 constexpr size_t kStackBufferEntries = 64; // Avoid allocations for small number of fields.
9302 FieldTypeOrderAndIndex stack_buffer[kStackBufferEntries];
9303 std::vector<FieldTypeOrderAndIndex> heap_buffer;
9304 ArrayRef<FieldTypeOrderAndIndex> sorted_fields;
9305 if (num_fields <= kStackBufferEntries) {
9306 sorted_fields = ArrayRef<FieldTypeOrderAndIndex>(stack_buffer, num_fields);
9307 } else {
9308 heap_buffer.resize(num_fields);
9309 sorted_fields = ArrayRef<FieldTypeOrderAndIndex>(heap_buffer);
9310 }
9311 size_t num_reference_fields = 0;
9312 size_t primitive_fields_start = num_fields;
9313 DCHECK_LE(num_fields, 1u << 16);
9314 for (size_t i = 0; i != num_fields; ++i) {
9315 ArtField* field = &fields->At(i);
9316 const char* descriptor = field->GetTypeDescriptor();
9317 FieldTypeOrder field_type_order = FieldTypeOrderFromFirstDescriptorCharacter(descriptor[0]);
9318 uint16_t field_index = dchecked_integral_cast<uint16_t>(i);
9319 // Insert references to the start, other fields to the end.
9320 DCHECK_LT(num_reference_fields, primitive_fields_start);
9321 if (field_type_order == FieldTypeOrder::kReference) {
9322 sorted_fields[num_reference_fields] = { field_type_order, field_index };
9323 ++num_reference_fields;
9324 } else {
9325 --primitive_fields_start;
9326 sorted_fields[primitive_fields_start] = { field_type_order, field_index };
9327 }
9328 }
9329 DCHECK_EQ(num_reference_fields, primitive_fields_start);
9330
9331 // Reference fields are already sorted by field index (and dex field index).
9332 DCHECK(std::is_sorted(
9333 sorted_fields.begin(),
9334 sorted_fields.begin() + num_reference_fields,
9335 [fields](const auto& lhs, const auto& rhs) REQUIRES_SHARED(Locks::mutator_lock_) {
9336 ArtField* lhs_field = &fields->At(lhs.field_index);
9337 ArtField* rhs_field = &fields->At(rhs.field_index);
9338 CHECK_EQ(lhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
9339 CHECK_EQ(rhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
9340 CHECK_EQ(lhs_field->GetDexFieldIndex() < rhs_field->GetDexFieldIndex(),
9341 lhs.field_index < rhs.field_index);
9342 return lhs_field->GetDexFieldIndex() < rhs_field->GetDexFieldIndex();
9343 }));
9344 // Primitive fields were stored in reverse order of their field index (and dex field index).
9345 DCHECK(std::is_sorted(
9346 sorted_fields.begin() + primitive_fields_start,
9347 sorted_fields.end(),
9348 [fields](const auto& lhs, const auto& rhs) REQUIRES_SHARED(Locks::mutator_lock_) {
9349 ArtField* lhs_field = &fields->At(lhs.field_index);
9350 ArtField* rhs_field = &fields->At(rhs.field_index);
9351 CHECK_NE(lhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
9352 CHECK_NE(rhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
9353 CHECK_EQ(lhs_field->GetDexFieldIndex() > rhs_field->GetDexFieldIndex(),
9354 lhs.field_index > rhs.field_index);
9355 return lhs.field_index > rhs.field_index;
9356 }));
9357 // Sort the primitive fields by the field type order, then field index.
9358 std::sort(sorted_fields.begin() + primitive_fields_start,
9359 sorted_fields.end(),
9360 [](const auto& lhs, const auto& rhs) {
9361 if (lhs.field_type_order != rhs.field_type_order) {
9362 return lhs.field_type_order < rhs.field_type_order;
9363 } else {
9364 return lhs.field_index < rhs.field_index;
9365 }
9366 });
9367 // Primitive fields are now sorted by field size (descending), then type, then field index.
9368 DCHECK(std::is_sorted(
9369 sorted_fields.begin() + primitive_fields_start,
9370 sorted_fields.end(),
9371 [fields](const auto& lhs, const auto& rhs) REQUIRES_SHARED(Locks::mutator_lock_) {
9372 ArtField* lhs_field = &fields->At(lhs.field_index);
9373 ArtField* rhs_field = &fields->At(rhs.field_index);
9374 Primitive::Type lhs_type = lhs_field->GetTypeAsPrimitiveType();
9375 CHECK_NE(lhs_type, Primitive::kPrimNot);
9376 Primitive::Type rhs_type = rhs_field->GetTypeAsPrimitiveType();
9377 CHECK_NE(rhs_type, Primitive::kPrimNot);
9378 if (lhs_type != rhs_type) {
9379 size_t lhs_size = Primitive::ComponentSize(lhs_type);
9380 size_t rhs_size = Primitive::ComponentSize(rhs_type);
9381 return (lhs_size != rhs_size) ? (lhs_size > rhs_size) : (lhs_type < rhs_type);
9382 } else {
9383 return lhs_field->GetDexFieldIndex() < rhs_field->GetDexFieldIndex();
9384 }
9385 }));
9386
9387 // Process reference fields.
9388 FieldGaps field_gaps;
9389 size_t index = 0u;
9390 if (num_reference_fields != 0u) {
9391 constexpr size_t kReferenceSize = sizeof(mirror::HeapReference<mirror::Object>);
9392 field_offset = field_gaps.AlignFieldOffset<kReferenceSize>(field_offset);
9393 for (; index != num_reference_fields; ++index) {
9394 ArtField* field = &fields->At(sorted_fields[index].field_index);
9395 field_offset = AssignFieldOffset<kReferenceSize>(field, field_offset);
9396 }
9397 }
9398 // Process 64-bit fields.
9399 if (index != num_fields &&
9400 sorted_fields[index].field_type_order <= FieldTypeOrder::kLast64BitType) {
9401 field_offset = field_gaps.AlignFieldOffset<8u>(field_offset);
9402 while (index != num_fields &&
9403 sorted_fields[index].field_type_order <= FieldTypeOrder::kLast64BitType) {
9404 ArtField* field = &fields->At(sorted_fields[index].field_index);
9405 field_offset = AssignFieldOffset<8u>(field, field_offset);
9406 ++index;
9407 }
9408 }
9409 // Process 32-bit fields.
9410 if (index != num_fields &&
9411 sorted_fields[index].field_type_order <= FieldTypeOrder::kLast32BitType) {
9412 field_offset = field_gaps.AlignFieldOffset<4u>(field_offset);
9413 if (field_gaps.HasGap<4u>()) {
9414 ArtField* field = &fields->At(sorted_fields[index].field_index);
9415 AssignFieldOffset<4u>(field, field_gaps.ReleaseGap<4u>()); // Ignore return value.
9416 ++index;
9417 DCHECK(!field_gaps.HasGap<4u>()); // There can be only one gap for a 32-bit field.
9418 }
9419 while (index != num_fields &&
9420 sorted_fields[index].field_type_order <= FieldTypeOrder::kLast32BitType) {
9421 ArtField* field = &fields->At(sorted_fields[index].field_index);
9422 field_offset = AssignFieldOffset<4u>(field, field_offset);
9423 ++index;
9424 }
9425 }
9426 // Process 16-bit fields.
9427 if (index != num_fields &&
9428 sorted_fields[index].field_type_order <= FieldTypeOrder::kLast16BitType) {
9429 field_offset = field_gaps.AlignFieldOffset<2u>(field_offset);
9430 while (index != num_fields &&
9431 sorted_fields[index].field_type_order <= FieldTypeOrder::kLast16BitType &&
9432 field_gaps.HasGap<2u>()) {
9433 ArtField* field = &fields->At(sorted_fields[index].field_index);
9434 AssignFieldOffset<2u>(field, field_gaps.ReleaseGap<2u>()); // Ignore return value.
9435 ++index;
9436 }
9437 while (index != num_fields &&
9438 sorted_fields[index].field_type_order <= FieldTypeOrder::kLast16BitType) {
9439 ArtField* field = &fields->At(sorted_fields[index].field_index);
9440 field_offset = AssignFieldOffset<2u>(field, field_offset);
9441 ++index;
9442 }
9443 }
9444 // Process 8-bit fields.
9445 for (; index != num_fields && field_gaps.HasGap<1u>(); ++index) {
9446 ArtField* field = &fields->At(sorted_fields[index].field_index);
9447 AssignFieldOffset<1u>(field, field_gaps.ReleaseGap<1u>()); // Ignore return value.
9448 }
9449 for (; index != num_fields; ++index) {
9450 ArtField* field = &fields->At(sorted_fields[index].field_index);
9451 field_offset = AssignFieldOffset<1u>(field, field_offset);
9452 }
9453
9454 self->EndAssertNoThreadSuspension(old_no_suspend_cause);
9455
9456 // We lie to the GC about the java.lang.ref.Reference.referent field, so it doesn't scan it.
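// (Skipping `referent` in regular field scanning is safe because the GC's
// reference-processing machinery discovers and handles `referent` separately,
// which is what makes this "lie" possible.)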
9457 DCHECK_IMPLIES(class_linker->init_done_, !klass->DescriptorEquals("Ljava/lang/ref/Reference;"));
9458 if (!is_static &&
9459 UNLIKELY(!class_linker->init_done_) &&
9460 klass->DescriptorEquals("Ljava/lang/ref/Reference;")) {
9461 // We know there are no non-reference fields in the Reference classes, and we know
9462 // that 'referent' is alphabetically last, so this is easy...
9463 CHECK_EQ(num_reference_fields, num_fields) << klass->PrettyClass();
9464 CHECK_STREQ(fields->At(num_fields - 1).GetName(), "referent")
9465 << klass->PrettyClass();
9466 --num_reference_fields;
9467 }
9468
9469 size_t size = field_offset.Uint32Value();
9470 // Update klass
9471 if (is_static) {
9472 klass->SetNumReferenceStaticFields(num_reference_fields);
9473 *class_size = size;
9474 } else {
9475 klass->SetNumReferenceInstanceFields(num_reference_fields);
9476 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
9477 if (num_reference_fields == 0 || super_class == nullptr) {
9478 // java.lang.Object has one reference field (its class pointer), but we ignore it
9479 // since we always visit the class. super_class is null iff the class is java.lang.Object.
9480 if (super_class == nullptr ||
9481 (super_class->GetClassFlags() & mirror::kClassFlagNoReferenceFields) != 0) {
9482 klass->SetClassFlags(klass->GetClassFlags() | mirror::kClassFlagNoReferenceFields);
9483 }
9484 }
9485 if (kIsDebugBuild) {
9486 DCHECK_EQ(super_class == nullptr, klass->DescriptorEquals("Ljava/lang/Object;"));
9487 size_t total_reference_instance_fields = 0;
9488 ObjPtr<mirror::Class> cur_super = klass.Get();
9489 while (cur_super != nullptr) {
9490 total_reference_instance_fields += cur_super->NumReferenceInstanceFieldsDuringLinking();
9491 cur_super = cur_super->GetSuperClass();
9492 }
9493 if (super_class == nullptr) {
9494 CHECK_EQ(total_reference_instance_fields, 1u) << klass->PrettyDescriptor();
9495 } else {
9496 // Check that there are at least num_reference_fields reference fields in addition to Object's class field.
9497 CHECK_GE(total_reference_instance_fields, 1u + num_reference_fields)
9498 << klass->PrettyClass();
9499 }
9500 }
9501 if (!klass->IsVariableSize()) {
9502 std::string temp;
9503 DCHECK_GE(size, sizeof(mirror::Object)) << klass->GetDescriptor(&temp);
9504 size_t previous_size = klass->GetObjectSize();
9505 if (previous_size != 0) {
9506 // Make sure that we didn't originally have an incorrect size.
9507 CHECK_EQ(previous_size, size) << klass->GetDescriptor(&temp);
9508 }
9509 klass->SetObjectSize(size);
9510 }
9511 }
9512
9513 if (kIsDebugBuild) {
9514 // Make sure that the fields array is ordered by name but all reference
9515 // offsets are at the beginning as far as alignment allows.
9516 MemberOffset start_ref_offset = is_static
9517 ? klass->GetFirstReferenceStaticFieldOffsetDuringLinking(class_linker->image_pointer_size_)
9518 : klass->GetFirstReferenceInstanceFieldOffset();
9519 MemberOffset end_ref_offset(start_ref_offset.Uint32Value() +
9520 num_reference_fields *
9521 sizeof(mirror::HeapReference<mirror::Object>));
9522 MemberOffset current_ref_offset = start_ref_offset;
9523 for (size_t i = 0; i < num_fields; i++) {
9524 ArtField* field = &fields->At(i);
9525 VLOG(class_linker) << "LinkFields: " << (is_static ? "static" : "instance")
9526 << " class=" << klass->PrettyClass() << " field=" << field->PrettyField()
9527 << " offset=" << field->GetOffsetDuringLinking();
9528 if (i != 0) {
9529 ArtField* const prev_field = &fields->At(i - 1);
9530 // NOTE: The field names can be the same. This is not possible in the Java language,
9531 // but it's valid Java/dex bytecode; for example, ProGuard can generate such bytecode.
9532 DCHECK_LE(strcmp(prev_field->GetName(), field->GetName()), 0);
9533 }
9534 Primitive::Type type = field->GetTypeAsPrimitiveType();
9535 bool is_primitive = type != Primitive::kPrimNot;
9536 if (klass->DescriptorEquals("Ljava/lang/ref/Reference;") &&
9537 strcmp("referent", field->GetName()) == 0) {
9538 is_primitive = true; // We lied above, so we have to expect a lie here.
9539 }
9540 MemberOffset offset = field->GetOffsetDuringLinking();
9541 if (is_primitive) {
9542 if (offset.Uint32Value() < end_ref_offset.Uint32Value()) {
9543 // Shuffled before references.
9544 size_t type_size = Primitive::ComponentSize(type);
9545 CHECK_LT(type_size, sizeof(mirror::HeapReference<mirror::Object>));
9546 CHECK_LT(offset.Uint32Value(), start_ref_offset.Uint32Value());
9547 CHECK_LE(offset.Uint32Value() + type_size, start_ref_offset.Uint32Value());
9548 CHECK(!IsAligned<sizeof(mirror::HeapReference<mirror::Object>)>(offset.Uint32Value()));
9549 }
9550 } else {
9551 CHECK_EQ(current_ref_offset.Uint32Value(), offset.Uint32Value());
9552 current_ref_offset = MemberOffset(current_ref_offset.Uint32Value() +
9553 sizeof(mirror::HeapReference<mirror::Object>));
9554 }
9555 }
9556 CHECK_EQ(current_ref_offset.Uint32Value(), end_ref_offset.Uint32Value());
9557 }
9558 return true;
9559 }
9560
9561 bool ClassLinker::LinkInstanceFields(Thread* self, Handle<mirror::Class> klass) {
9562 CHECK(klass != nullptr);
9563 return LinkFieldsHelper::LinkFields(this, self, klass, false, nullptr);
9564 }
9565
9566 bool ClassLinker::LinkStaticFields(Thread* self, Handle<mirror::Class> klass, size_t* class_size) {
9567 CHECK(klass != nullptr);
9568 return LinkFieldsHelper::LinkFields(this, self, klass, true, class_size);
9569 }
9570
9571 enum class RecordElementType : uint8_t {
9572 kNames = 0,
9573 kTypes = 1,
9574 kSignatures = 2,
9575 kAnnotationVisibilities = 3,
9576 kAnnotations = 4
9577 };
9578
9579 static const char* kRecordElementNames[] = {"componentNames",
9580 "componentTypes",
9581 "componentSignatures",
9582 "componentAnnotationVisibilities",
9583 "componentAnnotations"};
9584
9585 class RecordAnnotationVisitor final : public annotations::AnnotationVisitor {
9586 public:
9587 RecordAnnotationVisitor() {}
9588
9589 bool ValidateCounts() {
9590 if (is_error_) {
9591 return false;
9592 }
9593
9594 // Verify the counts.
9595 bool annotation_element_exists =
9596 (signatures_count_ != UINT32_MAX) || (annotations_count_ != UINT32_MAX);
9597 if (count_ >= 2) {
9598 SetErrorMsg("Record class can't have more than one @Record Annotation");
9599 } else if (names_count_ == UINT32_MAX) {
9600 SetErrorMsg("componentNames element is required");
9601 } else if (types_count_ == UINT32_MAX) {
9602 SetErrorMsg("componentTypes element is required");
9603 } else if (names_count_ != types_count_) { // Every component must have a name and a type.
9604 SetErrorMsg(StringPrintf(
9605 "componentTypes is expected to have %i, but has %i types", names_count_, types_count_));
9606 // The other 3 elements are optional, but is expected to have the same count if it exists.
9607 } else if (signatures_count_ != UINT32_MAX && signatures_count_ != names_count_) {
9608 SetErrorMsg(StringPrintf("componentSignatures size is %i, but is expected to be %i",
9609 signatures_count_,
9610 names_count_));
9611 } else if (annotation_element_exists && visibilities_count_ != names_count_) {
9612 SetErrorMsg(
9613 StringPrintf("componentAnnotationVisibilities size is %i, but is expected to be %i",
9614 visibilities_count_,
9615 names_count_));
9616 } else if (annotation_element_exists && annotations_count_ != names_count_) {
9617 SetErrorMsg(StringPrintf("componentAnnotations size is %i, but is expected to be %i",
9618 annotations_count_,
9619 names_count_));
9620 }
9621
9622 return !is_error_;
9623 }
9624
9625 const std::string& GetErrorMsg() { return error_msg_; }
9626
9627 bool IsRecordAnnotationFound() { return count_ != 0; }
9628
9629 annotations::VisitorStatus VisitAnnotation(const char* descriptor, uint8_t visibility) override {
9630 if (is_error_) {
9631 return annotations::VisitorStatus::kVisitBreak;
9632 }
9633
9634 if (visibility != DexFile::kDexVisibilitySystem) {
9635 return annotations::VisitorStatus::kVisitNext;
9636 }
9637
9638 if (strcmp(descriptor, "Ldalvik/annotation/Record;") != 0) {
9639 return annotations::VisitorStatus::kVisitNext;
9640 }
9641
9642 count_ += 1;
9643 if (count_ >= 2) {
9644 return annotations::VisitorStatus::kVisitBreak;
9645 }
9646 return annotations::VisitorStatus::kVisitInner;
9647 }
9648
9649 annotations::VisitorStatus VisitAnnotationElement(const char* element_name,
9650 uint8_t type,
9651 [[maybe_unused]] const JValue& value) override {
9652 if (is_error_) {
9653 return annotations::VisitorStatus::kVisitBreak;
9654 }
9655
9656 RecordElementType visiting_type;
9657 uint32_t* element_count;
9658 if (strcmp(element_name, "componentNames") == 0) {
9659 visiting_type = RecordElementType::kNames;
9660 element_count = &names_count_;
9661 } else if (strcmp(element_name, "componentTypes") == 0) {
9662 visiting_type = RecordElementType::kTypes;
9663 element_count = &types_count_;
9664 } else if (strcmp(element_name, "componentSignatures") == 0) {
9665 visiting_type = RecordElementType::kSignatures;
9666 element_count = &signatures_count_;
9667 } else if (strcmp(element_name, "componentAnnotationVisibilities") == 0) {
9668 visiting_type = RecordElementType::kAnnotationVisibilities;
9669 element_count = &visibilities_count_;
9670 } else if (strcmp(element_name, "componentAnnotations") == 0) {
9671 visiting_type = RecordElementType::kAnnotations;
9672 element_count = &annotations_count_;
9673 } else {
9674 // Ignore unknown elements; they may be introduced in future versions of ART.
9675 return annotations::VisitorStatus::kVisitNext;
9676 }
9677
9678 if ((*element_count) != UINT32_MAX) {
9679 SetErrorMsg(StringPrintf("Two %s annotation elements are found but only one is expected",
9680 kRecordElementNames[static_cast<uint8_t>(visiting_type)]));
9681 return annotations::VisitorStatus::kVisitBreak;
9682 }
9683
9684 if (type != DexFile::kDexAnnotationArray) {
9685 SetErrorMsg(StringPrintf("%s must be array type", element_name));
9686 return annotations::VisitorStatus::kVisitBreak;
9687 }
9688
9689 *element_count = 0;
9690 visiting_type_ = visiting_type;
9691 return annotations::VisitorStatus::kVisitInner;
9692 }
9693
9694 annotations::VisitorStatus VisitArrayElement(uint8_t depth,
9695 uint32_t index,
9696 uint8_t type,
9697 [[maybe_unused]] const JValue& value) override {
9698 if (is_error_) {
9699 return annotations::VisitorStatus::kVisitBreak;
9700 }
9701 switch (visiting_type_) {
9702 case RecordElementType::kNames: {
9703 if (depth == 0) {
9704 if (!ExpectedTypeOrError(
9705 type, DexFile::kDexAnnotationString, visiting_type_, index, depth)) {
9706 return annotations::VisitorStatus::kVisitBreak;
9707 }
9708 names_count_++;
9709 return annotations::VisitorStatus::kVisitNext;
9710 }
9711 break;
9712 }
9713 case RecordElementType::kTypes: {
9714 if (depth == 0) {
9715 if (!ExpectedTypeOrError(
9716 type, DexFile::kDexAnnotationType, visiting_type_, index, depth)) {
9717 return annotations::VisitorStatus::kVisitBreak;
9718 }
9719 types_count_++;
9720 return annotations::VisitorStatus::kVisitNext;
9721 }
9722 break;
9723 }
9724 case RecordElementType::kSignatures: {
9725 if (depth == 0) {
9726 // kDexAnnotationNull implies no generic signature for the component.
9727 if (type != DexFile::kDexAnnotationNull &&
9728 !ExpectedTypeOrError(
9729 type, DexFile::kDexAnnotationAnnotation, visiting_type_, index, depth)) {
9730 return annotations::VisitorStatus::kVisitBreak;
9731 }
9732 signatures_count_++;
9733 return annotations::VisitorStatus::kVisitNext;
9734 }
9735 break;
9736 }
9737 case RecordElementType::kAnnotationVisibilities: {
9738 if (depth == 0) {
9739 if (!ExpectedTypeOrError(
9740 type, DexFile::kDexAnnotationArray, visiting_type_, index, depth)) {
9741 return annotations::VisitorStatus::kVisitBreak;
9742 }
9743 visibilities_count_++;
9744 return annotations::VisitorStatus::kVisitInner;
9745 } else if (depth == 1) {
9746 if (!ExpectedTypeOrError(
9747 type, DexFile::kDexAnnotationByte, visiting_type_, index, depth)) {
9748 return annotations::VisitorStatus::kVisitBreak;
9749 }
9750 return annotations::VisitorStatus::kVisitNext;
9751 }
9752 break;
9753 }
9754 case RecordElementType::kAnnotations: {
9755 if (depth == 0) {
9756 if (!ExpectedTypeOrError(
9757 type, DexFile::kDexAnnotationArray, visiting_type_, index, depth)) {
9758 return annotations::VisitorStatus::kVisitBreak;
9759 }
9760 annotations_count_++;
9761 return annotations::VisitorStatus::kVisitInner;
9762 } else if (depth == 1) {
9763 if (!ExpectedTypeOrError(
9764 type, DexFile::kDexAnnotationAnnotation, visiting_type_, index, depth)) {
9765 return annotations::VisitorStatus::kVisitBreak;
9766 }
9767 return annotations::VisitorStatus::kVisitNext;
9768 }
9769 break;
9770 }
9771 }
9772
9773 // Should never happen if every next depth level is handled above whenever kVisitInner is
9774 // returned.
9775 DCHECK(false) << StringPrintf("Unexpected depth %i for element %s",
9776 depth,
9777 kRecordElementNames[static_cast<uint8_t>(visiting_type_)]);
9778 return annotations::VisitorStatus::kVisitBreak;
9779 }
9780
9781 private:
9782 bool is_error_ = false;
9783 uint32_t count_ = 0;
9784 uint32_t names_count_ = UINT32_MAX;
9785 uint32_t types_count_ = UINT32_MAX;
9786 uint32_t signatures_count_ = UINT32_MAX;
9787 uint32_t visibilities_count_ = UINT32_MAX;
9788 uint32_t annotations_count_ = UINT32_MAX;
9789 std::string error_msg_;
9790 RecordElementType visiting_type_;
9791
9792 inline bool ExpectedTypeOrError(uint8_t type,
9793 uint8_t expected,
9794 RecordElementType visiting_type,
9795 uint32_t index,
9796 uint8_t depth) {
9797 if (type == expected) {
9798 return true;
9799 }
9800
9801 SetErrorMsg(StringPrintf(
9802 "Expect 0x%02x type but got 0x%02x at the index %i and depth %i for the element %s",
9803 expected,
9804 type,
9805 index,
9806 depth,
9807 kRecordElementNames[static_cast<uint8_t>(visiting_type)]));
9808 return false;
9809 }
9810
9811 void SetErrorMsg(const std::string& msg) {
9812 is_error_ = true;
9813 error_msg_ = msg;
9814 }
9815
9816 DISALLOW_COPY_AND_ASSIGN(RecordAnnotationVisitor);
9817 };
9818
9819 /**
9820  * Verify that klass is a well-formed record class and, if so, set kClassFlagRecord.
9821  * If the verification fails, a pending Java exception is thrown.
9822  *
9823  * @return false if verification fails. If klass isn't a record class at all,
9824  * this always returns true.
9825  */
9826 bool ClassLinker::VerifyRecordClass(Handle<mirror::Class> klass, ObjPtr<mirror::Class> super) {
9827 CHECK(klass != nullptr);
9828 // First, we check the conditions specified in java.lang.Class#isRecord().
9829 // If any of the conditions isn't fulfilled, it's not a record class and
9830 // ART should treat it as a normal class even if it inherits from java.lang.Record.
9831 if (!klass->IsFinal()) {
9832 return true;
9833 }
9834
9835 if (super == nullptr) {
9836 return true;
9837 }
9838
9839 // Compare the descriptor directly while this ClassLinker is initializing,
9840 // before WellKnownClasses has been initialized.
9841 if (WellKnownClasses::java_lang_Record == nullptr) {
9842 if (!super->DescriptorEquals("Ljava/lang/Record;")) {
9843 return true;
9844 }
9845 } else {
9846 ObjPtr<mirror::Class> java_lang_Record =
9847 WellKnownClasses::ToClass(WellKnownClasses::java_lang_Record);
9848 if (super.Ptr() != java_lang_Record.Ptr()) {
9849 return true;
9850 }
9851 }
9852
9853 // Verify @dalvik.annotation.Record
9854 // The annotation has a mandatory element componentNames[] and componentTypes[] of the same size.
9855 // componentSignatures[], componentAnnotationVisibilities[][], componentAnnotations[][] are
9856 // optional, but should have the same size if they exist.
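// Illustrative example: for `record Point(int x, int y)` the annotation would
// carry componentNames = {"x", "y"} and componentTypes = {int, int}, so both
// mandatory counts must equal 2 for the class to pass validation.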
9857 RecordAnnotationVisitor visitor;
9858 annotations::VisitClassAnnotations(klass, &visitor);
9859 if (!visitor.IsRecordAnnotationFound()) {
9860 return true;
9861 }
9862
9863 if (!visitor.ValidateCounts()) {
9864 ThrowClassFormatError(klass.Get(), "%s", visitor.GetErrorMsg().c_str());
9865 return false;
9866 }
9867
9868 // Set kClassFlagRecord.
9869 klass->SetRecordClass();
9870 return true;
9871 }
9872
9873 // Set the bitmap of reference instance field offsets.
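// Illustrative example: with 4-byte heap references and three reference fields
// starting right after the 8-byte object header, `start_bit` is 0 and the
// resulting bitmap is 0x7 (bits 0-2 set).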
9874 void ClassLinker::CreateReferenceInstanceOffsets(Handle<mirror::Class> klass) {
9875 uint32_t reference_offsets = 0;
9876 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
9877 // Leave the reference offsets as 0 for mirror::Object (the class field is handled specially).
9878 if (super_class != nullptr) {
9879 reference_offsets = super_class->GetReferenceInstanceOffsets();
9880 // Compute reference offsets unless our superclass overflowed.
9881 if (reference_offsets != mirror::Class::kClassWalkSuper) {
9882 size_t num_reference_fields = klass->NumReferenceInstanceFieldsDuringLinking();
9883 if (num_reference_fields != 0u) {
9884 // All of the fields that contain object references are guaranteed to be grouped in memory
9885 // starting at an appropriately aligned address after super class object data.
9886 uint32_t start_offset = RoundUp(super_class->GetObjectSize(),
9887 sizeof(mirror::HeapReference<mirror::Object>));
9888 uint32_t start_bit = (start_offset - mirror::kObjectHeaderSize) /
9889 sizeof(mirror::HeapReference<mirror::Object>);
9890 if (start_bit + num_reference_fields > 32) {
9891 reference_offsets = mirror::Class::kClassWalkSuper;
9892 } else {
9893 reference_offsets |= (0xffffffffu << start_bit) &
9894 (0xffffffffu >> (32 - (start_bit + num_reference_fields)));
9895 }
9896 }
9897 }
9898 }
9899 klass->SetReferenceInstanceOffsets(reference_offsets);
9900 }
9901
9902 ObjPtr<mirror::String> ClassLinker::DoResolveString(dex::StringIndex string_idx,
9903 ObjPtr<mirror::DexCache> dex_cache) {
9904 StackHandleScope<1> hs(Thread::Current());
9905 Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(dex_cache));
9906 return DoResolveString(string_idx, h_dex_cache);
9907 }
9908
9909 ObjPtr<mirror::String> ClassLinker::DoResolveString(dex::StringIndex string_idx,
9910 Handle<mirror::DexCache> dex_cache) {
9911 const DexFile& dex_file = *dex_cache->GetDexFile();
9912 uint32_t utf16_length;
9913 const char* utf8_data = dex_file.GetStringDataAndUtf16Length(string_idx, &utf16_length);
9914 ObjPtr<mirror::String> string = intern_table_->InternStrong(utf16_length, utf8_data);
9915 if (string != nullptr) {
9916 dex_cache->SetResolvedString(string_idx, string);
9917 }
9918 return string;
9919 }
9920
DoLookupString(dex::StringIndex string_idx,ObjPtr<mirror::DexCache> dex_cache)9921 ObjPtr<mirror::String> ClassLinker::DoLookupString(dex::StringIndex string_idx,
9922 ObjPtr<mirror::DexCache> dex_cache) {
9923 DCHECK(dex_cache != nullptr);
9924 const DexFile& dex_file = *dex_cache->GetDexFile();
9925 uint32_t utf16_length;
9926 const char* utf8_data = dex_file.GetStringDataAndUtf16Length(string_idx, &utf16_length);
9927 ObjPtr<mirror::String> string =
9928 intern_table_->LookupStrong(Thread::Current(), utf16_length, utf8_data);
9929 if (string != nullptr) {
9930 dex_cache->SetResolvedString(string_idx, string);
9931 }
9932 return string;
9933 }
9934
DoLookupResolvedType(dex::TypeIndex type_idx,ObjPtr<mirror::Class> referrer)9935 ObjPtr<mirror::Class> ClassLinker::DoLookupResolvedType(dex::TypeIndex type_idx,
9936 ObjPtr<mirror::Class> referrer) {
9937 return DoLookupResolvedType(type_idx, referrer->GetDexCache(), referrer->GetClassLoader());
9938 }
9939
DoLookupResolvedType(dex::TypeIndex type_idx,ObjPtr<mirror::DexCache> dex_cache,ObjPtr<mirror::ClassLoader> class_loader)9940 ObjPtr<mirror::Class> ClassLinker::DoLookupResolvedType(dex::TypeIndex type_idx,
9941 ObjPtr<mirror::DexCache> dex_cache,
9942 ObjPtr<mirror::ClassLoader> class_loader) {
9943 DCHECK(dex_cache->GetClassLoader() == class_loader);
9944 const DexFile& dex_file = *dex_cache->GetDexFile();
9945 const char* descriptor = dex_file.GetTypeDescriptor(type_idx);
9946 ObjPtr<mirror::Class> type = LookupResolvedType(descriptor, class_loader);
9947 if (type != nullptr) {
9948 DCHECK(type->IsResolved());
9949 dex_cache->SetResolvedType(type_idx, type);
9950 }
9951 return type;
9952 }
9953
LookupResolvedType(const char * descriptor,ObjPtr<mirror::ClassLoader> class_loader)9954 ObjPtr<mirror::Class> ClassLinker::LookupResolvedType(const char* descriptor,
9955 ObjPtr<mirror::ClassLoader> class_loader) {
9956 DCHECK_NE(*descriptor, '\0') << "descriptor is empty string";
9957 ObjPtr<mirror::Class> type = nullptr;
9958 if (descriptor[1] == '\0') {
9959 // only the descriptors of primitive types should be 1 character long, also avoid class lookup
9960 // for primitive classes that aren't backed by dex files.
9961 type = LookupPrimitiveClass(descriptor[0]);
9962 } else {
9963 Thread* const self = Thread::Current();
9964 DCHECK(self != nullptr);
9965 const size_t hash = ComputeModifiedUtf8Hash(descriptor);
9966 // Find the class in the loaded classes table.
9967 type = LookupClass(self, descriptor, hash, class_loader);
9968 }
9969 return (type != nullptr && type->IsResolved()) ? type : nullptr;
9970 }
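
// Illustrative descriptor forms (assumption, not from the original source): a
// one-character descriptor such as "I" names the primitive int and takes the
// LookupPrimitiveClass() path above, while a reference descriptor such as
// "Ljava/lang/String;" is looked up in the loaded-classes table via LookupClass().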

template <typename RefType>
ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx, RefType referrer) {
  StackHandleScope<2> hs(Thread::Current());
  Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
  Handle<mirror::ClassLoader> class_loader(hs.NewHandle(referrer->GetClassLoader()));
  return DoResolveType(type_idx, dex_cache, class_loader);
}

// Instantiate the above.
template ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
                                                          ArtField* referrer);
template ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
                                                          ArtMethod* referrer);
template ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
                                                          ObjPtr<mirror::Class> referrer);

ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
                                                 Handle<mirror::DexCache> dex_cache,
                                                 Handle<mirror::ClassLoader> class_loader) {
  DCHECK(dex_cache->GetClassLoader() == class_loader.Get());
  Thread* self = Thread::Current();
  const char* descriptor = dex_cache->GetDexFile()->GetTypeDescriptor(type_idx);
  ObjPtr<mirror::Class> resolved = FindClass(self, descriptor, class_loader);
  if (resolved != nullptr) {
    // TODO: We used to throw here if resolved's class loader was not the boot class
    // loader. This was to permit different classes with the same name to be loaded
    // simultaneously by different loaders.
    dex_cache->SetResolvedType(type_idx, resolved);
  } else {
    CHECK(self->IsExceptionPending())
        << "Expected pending exception for failed resolution of: " << descriptor;
    // Convert a ClassNotFoundException to a NoClassDefFoundError.
    StackHandleScope<1> hs(self);
    Handle<mirror::Throwable> cause(hs.NewHandle(self->GetException()));
    if (cause->InstanceOf(GetClassRoot(ClassRoot::kJavaLangClassNotFoundException, this))) {
      DCHECK(resolved == nullptr);  // No Handle needed to preserve resolved.
      self->ClearException();
      ThrowNoClassDefFoundError("Failed resolution of: %s", descriptor);
      self->GetException()->SetCause(cause.Get());
    }
  }
  DCHECK((resolved == nullptr) || resolved->IsResolved())
      << resolved->PrettyDescriptor() << " " << resolved->GetStatus();
  return resolved;
}

ArtMethod* ClassLinker::FindResolvedMethod(ObjPtr<mirror::Class> klass,
                                           ObjPtr<mirror::DexCache> dex_cache,
                                           ObjPtr<mirror::ClassLoader> class_loader,
                                           uint32_t method_idx) {
  DCHECK(dex_cache->GetClassLoader() == class_loader);
  // Search for the method using dex_cache and method_idx. The Class::Find*Method()
  // functions can optimize the search if the dex_cache is the same as the DexCache
  // of the class, with fall-back to name and signature search otherwise.
  ArtMethod* resolved = nullptr;
  if (klass->IsInterface()) {
    resolved = klass->FindInterfaceMethod(dex_cache, method_idx, image_pointer_size_);
  } else {
    resolved = klass->FindClassMethod(dex_cache, method_idx, image_pointer_size_);
  }
  DCHECK(resolved == nullptr || resolved->GetDeclaringClassUnchecked() != nullptr);
  if (resolved != nullptr &&
      // We pass AccessMethod::kNone instead of kLinking to not warn yet on the
      // access, as we'll be checking below whether the method can be accessed
      // through an interface.
      hiddenapi::ShouldDenyAccessToMember(resolved,
                                          hiddenapi::AccessContext(class_loader, dex_cache),
                                          hiddenapi::AccessMethod::kNone)) {
    // The resolved method that we have found cannot be accessed due to
    // hiddenapi (typically it is declared up the hierarchy and is not an SDK
    // method). Try to find an interface method from the implemented interfaces which is
    // part of the SDK.
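    // Illustrative scenario (assumption, not from the original source): klass
    // inherits a hidden (non-SDK) method foo() from its superclass but also
    // implements an SDK interface declaring foo(); in that case resolution keeps
    // the originally resolved method because an accessible interface equivalent
    // exists.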
    ArtMethod* itf_method = klass->FindAccessibleInterfaceMethod(resolved, image_pointer_size_);
    if (itf_method == nullptr) {
      // No interface method. Call ShouldDenyAccessToMember again but this time
      // with AccessMethod::kLinking to ensure that an appropriate warning is
      // logged.
      hiddenapi::ShouldDenyAccessToMember(resolved,
                                          hiddenapi::AccessContext(class_loader, dex_cache),
                                          hiddenapi::AccessMethod::kLinking);
      resolved = nullptr;
    } else {
      // We found an interface method that is accessible, continue with the resolved method.
    }
  }
  if (resolved != nullptr) {
    // In case of jvmti, the dex file gets verified before being registered, so first
    // check if it's registered before checking class tables.
    const DexFile& dex_file = *dex_cache->GetDexFile();
    DCHECK_IMPLIES(
        IsDexFileRegistered(Thread::Current(), dex_file),
        FindClassTable(Thread::Current(), dex_cache) == ClassTableForClassLoader(class_loader))
        << "DexFile referrer: " << dex_file.GetLocation()
        << " ClassLoader: " << DescribeLoaders(class_loader, "");
    // Be a good citizen and update the dex cache to speed subsequent calls.
    dex_cache->SetResolvedMethod(method_idx, resolved);
    // Disable the following invariant check as the verifier breaks it. b/73760543
    // const DexFile::MethodId& method_id = dex_file.GetMethodId(method_idx);
    // DCHECK(LookupResolvedType(method_id.class_idx_, dex_cache, class_loader) != nullptr)
    //     << "Method: " << resolved->PrettyMethod() << ", "
    //     << "Class: " << klass->PrettyClass() << " (" << klass->GetStatus() << "), "
    //     << "DexFile referrer: " << dex_file.GetLocation();
  }
  return resolved;
}

// Returns true if `method` is either null or hidden.
// Does not print any warnings if it is hidden.
static bool CheckNoSuchMethod(ArtMethod* method,
                              ObjPtr<mirror::DexCache> dex_cache,
                              ObjPtr<mirror::ClassLoader> class_loader)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(dex_cache->GetClassLoader().Ptr() == class_loader.Ptr());
  return method == nullptr ||
         hiddenapi::ShouldDenyAccessToMember(method,
                                             hiddenapi::AccessContext(class_loader, dex_cache),
                                             hiddenapi::AccessMethod::kNone);  // no warnings
}

ArtMethod* ClassLinker::FindIncompatibleMethod(ObjPtr<mirror::Class> klass,
                                               ObjPtr<mirror::DexCache> dex_cache,
                                               ObjPtr<mirror::ClassLoader> class_loader,
                                               uint32_t method_idx) {
  DCHECK(dex_cache->GetClassLoader() == class_loader);
  if (klass->IsInterface()) {
    ArtMethod* method = klass->FindClassMethod(dex_cache, method_idx, image_pointer_size_);
    return CheckNoSuchMethod(method, dex_cache, class_loader) ? nullptr : method;
  } else {
    // If there was an interface method with the same signature, we would have
    // found it in the "copied" methods. Only DCHECK that the interface method
    // really does not exist.
    if (kIsDebugBuild) {
      ArtMethod* method =
          klass->FindInterfaceMethod(dex_cache, method_idx, image_pointer_size_);
      CHECK(CheckNoSuchMethod(method, dex_cache, class_loader) ||
            (klass->FindAccessibleInterfaceMethod(method, image_pointer_size_) == nullptr));
    }
    return nullptr;
  }
}

ArtMethod* ClassLinker::ResolveMethodWithoutInvokeType(uint32_t method_idx,
                                                       Handle<mirror::DexCache> dex_cache,
                                                       Handle<mirror::ClassLoader> class_loader) {
  DCHECK(dex_cache->GetClassLoader() == class_loader.Get());
  ArtMethod* resolved = dex_cache->GetResolvedMethod(method_idx);
  Thread::PoisonObjectPointersIfDebug();
  if (resolved != nullptr) {
    DCHECK(!resolved->IsRuntimeMethod());
    DCHECK(resolved->GetDeclaringClassUnchecked() != nullptr) << resolved->GetDexMethodIndex();
    return resolved;
  }
  // Fail, get the declaring class.
  const dex::MethodId& method_id = dex_cache->GetDexFile()->GetMethodId(method_idx);
  ObjPtr<mirror::Class> klass = ResolveType(method_id.class_idx_, dex_cache, class_loader);
  if (klass == nullptr) {
    Thread::Current()->AssertPendingException();
    return nullptr;
  }
  return FindResolvedMethod(klass, dex_cache.Get(), class_loader.Get(), method_idx);
}

ArtField* ClassLinker::LookupResolvedField(uint32_t field_idx,
                                           ObjPtr<mirror::DexCache> dex_cache,
                                           ObjPtr<mirror::ClassLoader> class_loader,
                                           bool is_static) {
  DCHECK(dex_cache->GetClassLoader().Ptr() == class_loader.Ptr());
  const DexFile& dex_file = *dex_cache->GetDexFile();
  const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
  ObjPtr<mirror::Class> klass = dex_cache->GetResolvedType(field_id.class_idx_);
  if (klass == nullptr) {
    klass = LookupResolvedType(field_id.class_idx_, dex_cache, class_loader);
  }
  if (klass == nullptr) {
    // The class has not been resolved yet, so the field is also unresolved.
    return nullptr;
  }
  DCHECK(klass->IsResolved());

  return FindResolvedField(klass, dex_cache, class_loader, field_idx, is_static);
}

ArtField* ClassLinker::ResolveFieldJLS(uint32_t field_idx,
                                       Handle<mirror::DexCache> dex_cache,
                                       Handle<mirror::ClassLoader> class_loader) {
  DCHECK(dex_cache != nullptr);
  DCHECK(dex_cache->GetClassLoader() == class_loader.Get());
  ArtField* resolved = dex_cache->GetResolvedField(field_idx);
  Thread::PoisonObjectPointersIfDebug();
  if (resolved != nullptr) {
    return resolved;
  }
  const DexFile& dex_file = *dex_cache->GetDexFile();
  const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
  ObjPtr<mirror::Class> klass = ResolveType(field_id.class_idx_, dex_cache, class_loader);
  if (klass == nullptr) {
    DCHECK(Thread::Current()->IsExceptionPending());
    return nullptr;
  }

  resolved = FindResolvedFieldJLS(klass, dex_cache.Get(), class_loader.Get(), field_idx);
  if (resolved == nullptr) {
    const char* name = dex_file.GetFieldName(field_id);
    const char* type = dex_file.GetFieldTypeDescriptor(field_id);
    ThrowNoSuchFieldError("", klass, type, name);
  }
  return resolved;
}

ArtField* ClassLinker::FindResolvedField(ObjPtr<mirror::Class> klass,
                                         ObjPtr<mirror::DexCache> dex_cache,
                                         ObjPtr<mirror::ClassLoader> class_loader,
                                         uint32_t field_idx,
                                         bool is_static) {
  DCHECK(dex_cache->GetClassLoader() == class_loader);
  ArtField* resolved = is_static ? klass->FindStaticField(dex_cache, field_idx)
                                 : klass->FindInstanceField(dex_cache, field_idx);
  if (resolved != nullptr &&
      hiddenapi::ShouldDenyAccessToMember(resolved,
                                          hiddenapi::AccessContext(class_loader, dex_cache),
                                          hiddenapi::AccessMethod::kLinking)) {
    resolved = nullptr;
  }

  if (resolved != nullptr) {
    dex_cache->SetResolvedField(field_idx, resolved);
  }

  return resolved;
}

ArtField* ClassLinker::FindResolvedFieldJLS(ObjPtr<mirror::Class> klass,
                                            ObjPtr<mirror::DexCache> dex_cache,
                                            ObjPtr<mirror::ClassLoader> class_loader,
                                            uint32_t field_idx) {
  DCHECK(dex_cache->GetClassLoader().Ptr() == class_loader.Ptr());
  ArtField* resolved = klass->FindField(dex_cache, field_idx);

  if (resolved != nullptr &&
      hiddenapi::ShouldDenyAccessToMember(resolved,
                                          hiddenapi::AccessContext(class_loader, dex_cache),
                                          hiddenapi::AccessMethod::kLinking)) {
    resolved = nullptr;
  }

  if (resolved != nullptr) {
    dex_cache->SetResolvedField(field_idx, resolved);
  }

  return resolved;
}

ObjPtr<mirror::MethodType> ClassLinker::ResolveMethodType(
    Thread* self,
    dex::ProtoIndex proto_idx,
    Handle<mirror::DexCache> dex_cache,
    Handle<mirror::ClassLoader> class_loader) {
  DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
  DCHECK(dex_cache != nullptr);
  DCHECK(dex_cache->GetClassLoader() == class_loader.Get());

  ObjPtr<mirror::MethodType> resolved = dex_cache->GetResolvedMethodType(proto_idx);
  if (resolved != nullptr) {
    return resolved;
  }

  VariableSizedHandleScope raw_method_type_hs(self);
  mirror::RawMethodType raw_method_type(&raw_method_type_hs);
  if (!ResolveMethodType(self, proto_idx, dex_cache, class_loader, raw_method_type)) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  // The handle scope was filled with the return type and the parameter types.
  DCHECK_EQ(raw_method_type_hs.Size(),
            dex_cache->GetDexFile()->GetShortyView(proto_idx).length());
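  // Illustrative example (assumption, not from the original source): for a proto
  // (ILjava/lang/String;)Z the shorty is "ZIL" - one character for the boolean
  // return type plus one per parameter - so the handle scope holds three classes.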
  ObjPtr<mirror::MethodType> method_type = mirror::MethodType::Create(self, raw_method_type);
  if (method_type != nullptr) {
    // Ensure all stores for the newly created MethodType are visible, before we attempt to place
    // it in the DexCache (b/224733324).
    std::atomic_thread_fence(std::memory_order_release);
    dex_cache->SetResolvedMethodType(proto_idx, method_type.Ptr());
  }
  return method_type;
}

bool ClassLinker::ResolveMethodType(Thread* self,
                                    dex::ProtoIndex proto_idx,
                                    Handle<mirror::DexCache> dex_cache,
                                    Handle<mirror::ClassLoader> class_loader,
                                    /*out*/ mirror::RawMethodType method_type) {
  DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
  DCHECK(dex_cache != nullptr);
  DCHECK(dex_cache->GetClassLoader() == class_loader.Get());

  // First resolve the return type.
  const DexFile& dex_file = *dex_cache->GetDexFile();
  const dex::ProtoId& proto_id = dex_file.GetProtoId(proto_idx);
  ObjPtr<mirror::Class> return_type =
      ResolveType(proto_id.return_type_idx_, dex_cache, class_loader);
  if (return_type == nullptr) {
    DCHECK(self->IsExceptionPending());
    return false;
  }
  method_type.SetRType(return_type);

  // Then resolve the argument types.
  DexFileParameterIterator it(dex_file, proto_id);
  for (; it.HasNext(); it.Next()) {
    const dex::TypeIndex type_idx = it.GetTypeIdx();
    ObjPtr<mirror::Class> param_type = ResolveType(type_idx, dex_cache, class_loader);
    if (param_type == nullptr) {
      DCHECK(self->IsExceptionPending());
      return false;
    }
    method_type.AddPType(param_type);
  }

  return true;
}

ObjPtr<mirror::MethodType> ClassLinker::ResolveMethodType(Thread* self,
                                                          dex::ProtoIndex proto_idx,
                                                          ArtMethod* referrer) {
  StackHandleScope<2> hs(self);
  Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
  Handle<mirror::ClassLoader> class_loader(hs.NewHandle(referrer->GetClassLoader()));
  return ResolveMethodType(self, proto_idx, dex_cache, class_loader);
}

ObjPtr<mirror::MethodHandle> ClassLinker::ResolveMethodHandleForField(
    Thread* self,
    const dex::MethodHandleItem& method_handle,
    ArtMethod* referrer) {
  DexFile::MethodHandleType handle_type =
      static_cast<DexFile::MethodHandleType>(method_handle.method_handle_type_);
  mirror::MethodHandle::Kind kind;
  bool is_put;
  bool is_static;
  int32_t num_params;
  switch (handle_type) {
    case DexFile::MethodHandleType::kStaticPut: {
      kind = mirror::MethodHandle::Kind::kStaticPut;
      is_put = true;
      is_static = true;
      num_params = 1;
      break;
    }
    case DexFile::MethodHandleType::kStaticGet: {
      kind = mirror::MethodHandle::Kind::kStaticGet;
      is_put = false;
      is_static = true;
      num_params = 0;
      break;
    }
    case DexFile::MethodHandleType::kInstancePut: {
      kind = mirror::MethodHandle::Kind::kInstancePut;
      is_put = true;
      is_static = false;
      num_params = 2;
      break;
    }
    case DexFile::MethodHandleType::kInstanceGet: {
      kind = mirror::MethodHandle::Kind::kInstanceGet;
      is_put = false;
      is_static = false;
      num_params = 1;
      break;
    }
    case DexFile::MethodHandleType::kInvokeStatic:
    case DexFile::MethodHandleType::kInvokeInstance:
    case DexFile::MethodHandleType::kInvokeConstructor:
    case DexFile::MethodHandleType::kInvokeDirect:
    case DexFile::MethodHandleType::kInvokeInterface:
      LOG(FATAL) << "Unreachable";
      UNREACHABLE();
  }
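
  // Illustrative MethodType shapes for a field of type T declared in class R
  // (assumption, not from the original source):
  //   kStaticGet:   ()T          kStaticPut:   (T)V
  //   kInstanceGet: (R)T         kInstancePut: (R, T)V
  // This is why num_params above is 0, 1, 1 and 2, respectively.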

  ArtField* target_field =
      ResolveField(method_handle.field_or_method_idx_, referrer, is_static);
  if (LIKELY(target_field != nullptr)) {
    ObjPtr<mirror::Class> target_class = target_field->GetDeclaringClass();
    ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
    if (UNLIKELY(!referring_class->CanAccessMember(target_class, target_field->GetAccessFlags()))) {
      ThrowIllegalAccessErrorField(referring_class, target_field);
      return nullptr;
    }
    if (UNLIKELY(is_put && target_field->IsFinal())) {
      ThrowIllegalAccessErrorField(referring_class, target_field);
      return nullptr;
    }
  } else {
    DCHECK(Thread::Current()->IsExceptionPending());
    return nullptr;
  }

  StackHandleScope<4> hs(self);
  ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
  Handle<mirror::ObjectArray<mirror::Class>> method_params(hs.NewHandle(
      mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, num_params)));
  if (UNLIKELY(method_params == nullptr)) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  Handle<mirror::Class> constructor_class;
  Handle<mirror::Class> return_type;
  switch (handle_type) {
    case DexFile::MethodHandleType::kStaticPut: {
      method_params->Set(0, target_field->ResolveType());
      return_type = hs.NewHandle(GetClassRoot(ClassRoot::kPrimitiveVoid, this));
      break;
    }
    case DexFile::MethodHandleType::kStaticGet: {
      return_type = hs.NewHandle(target_field->ResolveType());
      break;
    }
    case DexFile::MethodHandleType::kInstancePut: {
      method_params->Set(0, target_field->GetDeclaringClass());
      method_params->Set(1, target_field->ResolveType());
      return_type = hs.NewHandle(GetClassRoot(ClassRoot::kPrimitiveVoid, this));
      break;
    }
    case DexFile::MethodHandleType::kInstanceGet: {
      method_params->Set(0, target_field->GetDeclaringClass());
      return_type = hs.NewHandle(target_field->ResolveType());
      break;
    }
    case DexFile::MethodHandleType::kInvokeStatic:
    case DexFile::MethodHandleType::kInvokeInstance:
    case DexFile::MethodHandleType::kInvokeConstructor:
    case DexFile::MethodHandleType::kInvokeDirect:
    case DexFile::MethodHandleType::kInvokeInterface:
      LOG(FATAL) << "Unreachable";
      UNREACHABLE();
  }

  for (int32_t i = 0; i < num_params; ++i) {
    if (UNLIKELY(method_params->Get(i) == nullptr)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;
    }
  }

  if (UNLIKELY(return_type.IsNull())) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  Handle<mirror::MethodType>
      method_type(hs.NewHandle(mirror::MethodType::Create(self, return_type, method_params)));
  if (UNLIKELY(method_type.IsNull())) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  uintptr_t target = reinterpret_cast<uintptr_t>(target_field);
  return mirror::MethodHandleImpl::Create(self, target, kind, method_type);
}

ObjPtr<mirror::MethodHandle> ClassLinker::ResolveMethodHandleForMethod(
    Thread* self,
    const dex::MethodHandleItem& method_handle,
    ArtMethod* referrer) {
  DexFile::MethodHandleType handle_type =
      static_cast<DexFile::MethodHandleType>(method_handle.method_handle_type_);
  mirror::MethodHandle::Kind kind;
  uint32_t receiver_count = 0;
  ArtMethod* target_method = nullptr;
  switch (handle_type) {
    case DexFile::MethodHandleType::kStaticPut:
    case DexFile::MethodHandleType::kStaticGet:
    case DexFile::MethodHandleType::kInstancePut:
    case DexFile::MethodHandleType::kInstanceGet:
      LOG(FATAL) << "Unreachable";
      UNREACHABLE();
    case DexFile::MethodHandleType::kInvokeStatic: {
      kind = mirror::MethodHandle::Kind::kInvokeStatic;
      receiver_count = 0;
      target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
                                                            method_handle.field_or_method_idx_,
                                                            referrer,
                                                            InvokeType::kStatic);
      break;
    }
    case DexFile::MethodHandleType::kInvokeInstance: {
      kind = mirror::MethodHandle::Kind::kInvokeVirtual;
      receiver_count = 1;
      target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
                                                            method_handle.field_or_method_idx_,
                                                            referrer,
                                                            InvokeType::kVirtual);
      break;
    }
    case DexFile::MethodHandleType::kInvokeConstructor: {
      // Constructors are currently implemented as a transform. They
      // are special cased later in this method.
      kind = mirror::MethodHandle::Kind::kInvokeTransform;
      receiver_count = 0;
      target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
                                                            method_handle.field_or_method_idx_,
                                                            referrer,
                                                            InvokeType::kDirect);
      break;
    }
    case DexFile::MethodHandleType::kInvokeDirect: {
      kind = mirror::MethodHandle::Kind::kInvokeDirect;
      receiver_count = 1;
      StackHandleScope<2> hs(self);
      // A constant method handle with type kInvokeDirect can refer to
      // a method that is private or to a method in a super class. To
      // disambiguate the two options, we resolve the method ignoring
      // the invocation type to determine if the method is private. We
      // then resolve again specifying the intended invocation type to
      // force the appropriate checks.
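      // Illustrative example (assumption, not from the original source): a handle
      // to a private Sub.helper() resolves to a private method and keeps
      // kInvokeDirect, while a handle to Super.toString() seen from Sub does not,
      // so it is re-resolved below with InvokeType::kSuper and uses kInvokeSuper.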
      target_method = ResolveMethodWithoutInvokeType(method_handle.field_or_method_idx_,
                                                     hs.NewHandle(referrer->GetDexCache()),
                                                     hs.NewHandle(referrer->GetClassLoader()));
      if (UNLIKELY(target_method == nullptr)) {
        break;
      }

      if (target_method->IsPrivate()) {
        kind = mirror::MethodHandle::Kind::kInvokeDirect;
        target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
                                                              method_handle.field_or_method_idx_,
                                                              referrer,
                                                              InvokeType::kDirect);
      } else {
        kind = mirror::MethodHandle::Kind::kInvokeSuper;
        target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
                                                              method_handle.field_or_method_idx_,
                                                              referrer,
                                                              InvokeType::kSuper);
        if (UNLIKELY(target_method == nullptr)) {
          break;
        }
        // Find the method specified in the parent of the referring class so that
        // invoke-super invokes the method in the parent of the referrer.
        target_method =
            referrer->GetDeclaringClass()->FindVirtualMethodForVirtual(target_method,
                                                                       kRuntimePointerSize);
      }
      break;
    }
    case DexFile::MethodHandleType::kInvokeInterface: {
      kind = mirror::MethodHandle::Kind::kInvokeInterface;
      receiver_count = 1;
      target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
                                                            method_handle.field_or_method_idx_,
                                                            referrer,
                                                            InvokeType::kInterface);
      break;
    }
  }

  if (UNLIKELY(target_method == nullptr)) {
    DCHECK(Thread::Current()->IsExceptionPending());
    return nullptr;
  }

  ObjPtr<mirror::Class> target_class = target_method->GetDeclaringClass();
  ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
  uint32_t access_flags = target_method->GetAccessFlags();
  if (UNLIKELY(!referring_class->CanAccessMember(target_class, access_flags))) {
    ThrowIllegalAccessErrorMethod(referring_class, target_method);
    return nullptr;
  }

  // Calculate the number of parameters from the method shorty. We add the
  // receiver count (0 or 1) and deduct one for the return value.
  uint32_t shorty_length;
  target_method->GetShorty(&shorty_length);
  int32_t num_params = static_cast<int32_t>(shorty_length + receiver_count - 1);
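  // Worked example (assumption, not from the original source): for a virtual target
  // String foo(int, long) the shorty is "LIJ" (length 3) and receiver_count is 1,
  // so num_params = 3 + 1 - 1 = 3: the receiver plus the two declared arguments.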

  StackHandleScope<5> hs(self);
  ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
  Handle<mirror::ObjectArray<mirror::Class>> method_params(hs.NewHandle(
      mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, num_params)));
  if (method_params.Get() == nullptr) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  const DexFile* dex_file = referrer->GetDexFile();
  const dex::MethodId& method_id = dex_file->GetMethodId(method_handle.field_or_method_idx_);
  int32_t index = 0;
  if (receiver_count != 0) {
    // Insert the receiver. Use the class identified in the method handle rather than
    // the declaring class of the resolved method, which may be a super class or the
    // interface declaring a default method (b/115964401).
    ObjPtr<mirror::Class> receiver_class = LookupResolvedType(method_id.class_idx_, referrer);
    // receiver_class should have been resolved when resolving the target method.
    DCHECK(receiver_class != nullptr);
    method_params->Set(index++, receiver_class);
  }

  const dex::ProtoId& proto_id = dex_file->GetProtoId(method_id.proto_idx_);
  DexFileParameterIterator it(*dex_file, proto_id);
  while (it.HasNext()) {
    DCHECK_LT(index, num_params);
    const dex::TypeIndex type_idx = it.GetTypeIdx();
    ObjPtr<mirror::Class> klass = ResolveType(type_idx, referrer);
    if (nullptr == klass) {
      DCHECK(self->IsExceptionPending());
      return nullptr;
    }
    method_params->Set(index++, klass);
    it.Next();
  }

  Handle<mirror::Class> return_type =
      hs.NewHandle(ResolveType(proto_id.return_type_idx_, referrer));
  if (UNLIKELY(return_type.IsNull())) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  Handle<mirror::MethodType>
      method_type(hs.NewHandle(mirror::MethodType::Create(self, return_type, method_params)));
  if (UNLIKELY(method_type.IsNull())) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  if (UNLIKELY(handle_type == DexFile::MethodHandleType::kInvokeConstructor)) {
    Handle<mirror::Class> constructor_class = hs.NewHandle(target_method->GetDeclaringClass());
    Handle<mirror::MethodHandlesLookup> lookup =
        hs.NewHandle(mirror::MethodHandlesLookup::GetDefault(self));
    return lookup->FindConstructor(self, constructor_class, method_type);
  }

  uintptr_t target = reinterpret_cast<uintptr_t>(target_method);
  return mirror::MethodHandleImpl::Create(self, target, kind, method_type);
}

ObjPtr<mirror::MethodHandle> ClassLinker::ResolveMethodHandle(Thread* self,
                                                              uint32_t method_handle_idx,
                                                              ArtMethod* referrer)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  const DexFile* const dex_file = referrer->GetDexFile();
  const dex::MethodHandleItem& method_handle = dex_file->GetMethodHandle(method_handle_idx);
  switch (static_cast<DexFile::MethodHandleType>(method_handle.method_handle_type_)) {
    case DexFile::MethodHandleType::kStaticPut:
    case DexFile::MethodHandleType::kStaticGet:
    case DexFile::MethodHandleType::kInstancePut:
    case DexFile::MethodHandleType::kInstanceGet:
      return ResolveMethodHandleForField(self, method_handle, referrer);
    case DexFile::MethodHandleType::kInvokeStatic:
    case DexFile::MethodHandleType::kInvokeInstance:
    case DexFile::MethodHandleType::kInvokeConstructor:
    case DexFile::MethodHandleType::kInvokeDirect:
    case DexFile::MethodHandleType::kInvokeInterface:
      return ResolveMethodHandleForMethod(self, method_handle, referrer);
  }
}

bool ClassLinker::IsQuickResolutionStub(const void* entry_point) const {
  return (entry_point == GetQuickResolutionStub()) ||
      (quick_resolution_trampoline_ == entry_point);
}

bool ClassLinker::IsQuickToInterpreterBridge(const void* entry_point) const {
  return (entry_point == GetQuickToInterpreterBridge()) ||
      (quick_to_interpreter_bridge_trampoline_ == entry_point);
}

bool ClassLinker::IsQuickGenericJniStub(const void* entry_point) const {
  return (entry_point == GetQuickGenericJniStub()) ||
      (quick_generic_jni_trampoline_ == entry_point);
}

bool ClassLinker::IsJniDlsymLookupStub(const void* entry_point) const {
  return entry_point == GetJniDlsymLookupStub() ||
      (jni_dlsym_lookup_trampoline_ == entry_point);
}

bool ClassLinker::IsJniDlsymLookupCriticalStub(const void* entry_point) const {
  return entry_point == GetJniDlsymLookupCriticalStub() ||
      (jni_dlsym_lookup_critical_trampoline_ == entry_point);
}

const void* ClassLinker::GetRuntimeQuickGenericJniStub() const {
  return GetQuickGenericJniStub();
}

void ClassLinker::SetEntryPointsForObsoleteMethod(ArtMethod* method) const {
  DCHECK(method->IsObsolete());
  // We cannot mess with the entrypoints of native methods because they are used to determine how
  // large the method's quick stack frame is. Without this information we cannot walk the stacks.
  if (!method->IsNative()) {
    method->SetEntryPointFromQuickCompiledCode(GetInvokeObsoleteMethodStub());
  }
}

void ClassLinker::DumpForSigQuit(std::ostream& os) {
  ScopedObjectAccess soa(Thread::Current());
  ReaderMutexLock mu(soa.Self(), *Locks::classlinker_classes_lock_);
  os << "Zygote loaded classes=" << NumZygoteClasses() << " post zygote classes="
     << NumNonZygoteClasses() << "\n";
  ReaderMutexLock mu2(soa.Self(), *Locks::dex_lock_);
  os << "Dumping registered class loaders\n";
  size_t class_loader_index = 0;
  for (const ClassLoaderData& class_loader : class_loaders_) {
    ObjPtr<mirror::ClassLoader> loader =
        ObjPtr<mirror::ClassLoader>::DownCast(soa.Self()->DecodeJObject(class_loader.weak_root));
    if (loader != nullptr) {
      os << "#" << class_loader_index++ << " " << loader->GetClass()->PrettyDescriptor() << ": [";
      bool saw_one_dex_file = false;
      for (const auto& entry : dex_caches_) {
        const DexCacheData& dex_cache = entry.second;
        if (dex_cache.class_table == class_loader.class_table) {
          if (saw_one_dex_file) {
            os << ":";
          }
          saw_one_dex_file = true;
          os << entry.first->GetLocation();
        }
      }
      os << "]";
      bool found_parent = false;
      if (loader->GetParent() != nullptr) {
        size_t parent_index = 0;
        for (const ClassLoaderData& class_loader2 : class_loaders_) {
          ObjPtr<mirror::ClassLoader> loader2 = ObjPtr<mirror::ClassLoader>::DownCast(
              soa.Self()->DecodeJObject(class_loader2.weak_root));
          if (loader2 == loader->GetParent()) {
            os << ", parent #" << parent_index;
            found_parent = true;
            break;
          }
          parent_index++;
        }
        if (!found_parent) {
          os << ", unregistered parent of type "
             << loader->GetParent()->GetClass()->PrettyDescriptor();
        }
      } else {
        os << ", no parent";
      }
      os << "\n";
    }
  }
  os << "Done dumping class loaders\n";
  Runtime* runtime = Runtime::Current();
  os << "Classes initialized: " << runtime->GetStat(KIND_GLOBAL_CLASS_INIT_COUNT) << " in "
     << PrettyDuration(runtime->GetStat(KIND_GLOBAL_CLASS_INIT_TIME)) << "\n";
}
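
// Sketch of the output shape produced above (derived from the stream writes here,
// illustrative rather than captured from a device):
//   Zygote loaded classes=4096 post zygote classes=512
//   Dumping registered class loaders
//   #0 dalvik.system.PathClassLoader: [/data/app/base.apk], no parent
//   Done dumping class loaders
//   Classes initialized: 123 in 45ms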

class CountClassesVisitor : public ClassLoaderVisitor {
 public:
  CountClassesVisitor() : num_zygote_classes(0), num_non_zygote_classes(0) {}

  void Visit(ObjPtr<mirror::ClassLoader> class_loader)
      REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
    ClassTable* const class_table = class_loader->GetClassTable();
    if (class_table != nullptr) {
      num_zygote_classes += class_table->NumZygoteClasses(class_loader);
      num_non_zygote_classes += class_table->NumNonZygoteClasses(class_loader);
    }
  }

  size_t num_zygote_classes;
  size_t num_non_zygote_classes;
};

size_t ClassLinker::NumZygoteClasses() const {
  CountClassesVisitor visitor;
  VisitClassLoaders(&visitor);
  return visitor.num_zygote_classes + boot_class_table_->NumZygoteClasses(nullptr);
}

size_t ClassLinker::NumNonZygoteClasses() const {
  CountClassesVisitor visitor;
  VisitClassLoaders(&visitor);
  return visitor.num_non_zygote_classes + boot_class_table_->NumNonZygoteClasses(nullptr);
}

size_t ClassLinker::NumLoadedClasses() {
  ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
  // Only return non-zygote classes since these are the ones apps care about.
  return NumNonZygoteClasses();
}

pid_t ClassLinker::GetClassesLockOwner() {
  return Locks::classlinker_classes_lock_->GetExclusiveOwnerTid();
}

pid_t ClassLinker::GetDexLockOwner() {
  return Locks::dex_lock_->GetExclusiveOwnerTid();
}

void ClassLinker::SetClassRoot(ClassRoot class_root, ObjPtr<mirror::Class> klass) {
  DCHECK(!init_done_);

  DCHECK(klass != nullptr);
  DCHECK(klass->GetClassLoader() == nullptr);

  mirror::ObjectArray<mirror::Class>* class_roots = class_roots_.Read();
  DCHECK(class_roots != nullptr);
  DCHECK_LT(static_cast<uint32_t>(class_root), static_cast<uint32_t>(ClassRoot::kMax));
  int32_t index = static_cast<int32_t>(class_root);
  DCHECK(class_roots->Get(index) == nullptr);
  class_roots->Set<false>(index, klass);
}

ObjPtr<mirror::ClassLoader> ClassLinker::CreateWellKnownClassLoader(
    Thread* self,
    const std::vector<const DexFile*>& dex_files,
    Handle<mirror::Class> loader_class,
    Handle<mirror::ClassLoader> parent_loader,
    Handle<mirror::ObjectArray<mirror::ClassLoader>> shared_libraries,
    Handle<mirror::ObjectArray<mirror::ClassLoader>> shared_libraries_after) {
  CHECK(loader_class.Get() == WellKnownClasses::dalvik_system_PathClassLoader ||
        loader_class.Get() == WellKnownClasses::dalvik_system_DelegateLastClassLoader ||
        loader_class.Get() == WellKnownClasses::dalvik_system_InMemoryDexClassLoader);

  StackHandleScope<5> hs(self);

  ArtField* dex_elements_field = WellKnownClasses::dalvik_system_DexPathList_dexElements;

  Handle<mirror::Class> dex_elements_class(hs.NewHandle(dex_elements_field->ResolveType()));
  DCHECK(dex_elements_class != nullptr);
  DCHECK(dex_elements_class->IsArrayClass());
  Handle<mirror::ObjectArray<mirror::Object>> h_dex_elements(hs.NewHandle(
      mirror::ObjectArray<mirror::Object>::Alloc(self,
                                                 dex_elements_class.Get(),
                                                 dex_files.size())));
  Handle<mirror::Class> h_dex_element_class =
      hs.NewHandle(dex_elements_class->GetComponentType());

  ArtField* element_file_field = WellKnownClasses::dalvik_system_DexPathList__Element_dexFile;
  DCHECK_EQ(h_dex_element_class.Get(), element_file_field->GetDeclaringClass());

  ArtField* cookie_field = WellKnownClasses::dalvik_system_DexFile_cookie;
  DCHECK_EQ(cookie_field->GetDeclaringClass(), element_file_field->LookupResolvedType());

  ArtField* file_name_field = WellKnownClasses::dalvik_system_DexFile_fileName;
  DCHECK_EQ(file_name_field->GetDeclaringClass(), element_file_field->LookupResolvedType());

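  // Object graph being built below (illustrative sketch, not part of the original
  // source): ClassLoader.pathList -> DexPathList.dexElements[] -> Element.dexFile
  // -> DexFile.cookie, a long[] whose slot kDexFileIndexStart holds the native
  // DexFile pointer.
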
  // Fill the elements array.
  int32_t index = 0;
  for (const DexFile* dex_file : dex_files) {
    StackHandleScope<4> hs2(self);

    // CreateWellKnownClassLoader is only used by gtests and compiler.
    // Index 0 of h_long_array is supposed to be the oat file but we can leave it null.
    Handle<mirror::LongArray> h_long_array = hs2.NewHandle(mirror::LongArray::Alloc(
        self,
        kDexFileIndexStart + 1));
    DCHECK(h_long_array != nullptr);
    h_long_array->Set(kDexFileIndexStart, reinterpret_cast64<int64_t>(dex_file));

    // Note that this creates a finalizable dalvik.system.DexFile object and a corresponding
    // FinalizerReference which will never get cleaned up without a started runtime.
    Handle<mirror::Object> h_dex_file = hs2.NewHandle(
        cookie_field->GetDeclaringClass()->AllocObject(self));
    DCHECK(h_dex_file != nullptr);
    cookie_field->SetObject<false>(h_dex_file.Get(), h_long_array.Get());

    Handle<mirror::String> h_file_name = hs2.NewHandle(
        mirror::String::AllocFromModifiedUtf8(self, dex_file->GetLocation().c_str()));
    DCHECK(h_file_name != nullptr);
    file_name_field->SetObject<false>(h_dex_file.Get(), h_file_name.Get());

    Handle<mirror::Object> h_element = hs2.NewHandle(h_dex_element_class->AllocObject(self));
    DCHECK(h_element != nullptr);
    element_file_field->SetObject<false>(h_element.Get(), h_dex_file.Get());

    h_dex_elements->Set(index, h_element.Get());
    index++;
  }
  DCHECK_EQ(index, h_dex_elements->GetLength());

  // Create DexPathList.
  Handle<mirror::Object> h_dex_path_list = hs.NewHandle(
      dex_elements_field->GetDeclaringClass()->AllocObject(self));
  DCHECK(h_dex_path_list != nullptr);
  // Set elements.
  dex_elements_field->SetObject<false>(h_dex_path_list.Get(), h_dex_elements.Get());
  // Create an empty List for "nativeLibraryDirectories", required for native tests.
  // Note: this code is uncommon (oatdump) / testing-only, so don't add further
  // WellKnownClasses elements.
  {
    ArtField* native_lib_dirs = dex_elements_field->GetDeclaringClass()->
        FindDeclaredInstanceField("nativeLibraryDirectories", "Ljava/util/List;");
    DCHECK(native_lib_dirs != nullptr);
    ObjPtr<mirror::Class> list_class = FindSystemClass(self, "Ljava/util/ArrayList;");
    DCHECK(list_class != nullptr);
    {
      StackHandleScope<1> h_list_scope(self);
      Handle<mirror::Class> h_list_class(h_list_scope.NewHandle<mirror::Class>(list_class));
      bool list_init = EnsureInitialized(self, h_list_class, true, true);
      DCHECK(list_init);
      list_class = h_list_class.Get();
    }
    ObjPtr<mirror::Object> list_object = list_class->AllocObject(self);
    // Note: we leave the object uninitialized. This must never leak into any non-testing
    // code, but is fine for testing. While it violates a Java-code invariant (the
    // elementData field is normally never null), as long as one does not try to add
    // elements, this will still work.
    native_lib_dirs->SetObject<false>(h_dex_path_list.Get(), list_object);
  }

  // Create the class loader.
  Handle<mirror::ClassLoader> h_class_loader = hs.NewHandle<mirror::ClassLoader>(
      ObjPtr<mirror::ClassLoader>::DownCast(loader_class->AllocObject(self)));
  DCHECK(h_class_loader != nullptr);
  // Set DexPathList.
  ArtField* path_list_field = WellKnownClasses::dalvik_system_BaseDexClassLoader_pathList;
  DCHECK(path_list_field != nullptr);
  path_list_field->SetObject<false>(h_class_loader.Get(), h_dex_path_list.Get());

  // Make a pretend boot-classpath.
  // TODO: Should we scan the image?
  ArtField* const parent_field = WellKnownClasses::java_lang_ClassLoader_parent;
  DCHECK(parent_field != nullptr);
  if (parent_loader.Get() == nullptr) {
    ObjPtr<mirror::Object> boot_loader(
        WellKnownClasses::java_lang_BootClassLoader->AllocObject(self));
    parent_field->SetObject<false>(h_class_loader.Get(), boot_loader);
  } else {
    parent_field->SetObject<false>(h_class_loader.Get(), parent_loader.Get());
  }

  ArtField* shared_libraries_field =
      WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoaders;
  DCHECK(shared_libraries_field != nullptr);
  shared_libraries_field->SetObject<false>(h_class_loader.Get(), shared_libraries.Get());

  ArtField* shared_libraries_after_field =
      WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoadersAfter;
  DCHECK(shared_libraries_after_field != nullptr);
  shared_libraries_after_field->SetObject<false>(h_class_loader.Get(),
                                                 shared_libraries_after.Get());
  return h_class_loader.Get();
}

jobject ClassLinker::CreatePathClassLoader(Thread* self,
                                           const std::vector<const DexFile*>& dex_files) {
  StackHandleScope<3u> hs(self);
  Handle<mirror::Class> d_s_pcl =
      hs.NewHandle(WellKnownClasses::dalvik_system_PathClassLoader.Get());
  auto null_parent = hs.NewHandle<mirror::ClassLoader>(nullptr);
  auto null_libs = hs.NewHandle<mirror::ObjectArray<mirror::ClassLoader>>(nullptr);
  ObjPtr<mirror::ClassLoader> class_loader =
      CreateWellKnownClassLoader(self, dex_files, d_s_pcl, null_parent, null_libs, null_libs);
  return Runtime::Current()->GetJavaVM()->AddGlobalRef(self, class_loader);
}

void ClassLinker::DropFindArrayClassCache() {
  std::fill_n(find_array_class_cache_, kFindArrayCacheSize, GcRoot<mirror::Class>(nullptr));
  find_array_class_cache_next_victim_ = 0;
}

void ClassLinker::VisitClassLoaders(ClassLoaderVisitor* visitor) const {
  Thread* const self = Thread::Current();
  for (const ClassLoaderData& data : class_loaders_) {
    // Need to use DecodeJObject so that we get null for cleared JNI weak globals.
    ObjPtr<mirror::ClassLoader> class_loader = ObjPtr<mirror::ClassLoader>::DownCast(
        self->DecodeJObject(data.weak_root));
    if (class_loader != nullptr) {
      visitor->Visit(class_loader);
    }
  }
}

void ClassLinker::VisitDexCaches(DexCacheVisitor* visitor) const {
  Thread* const self = Thread::Current();
  for (const auto& it : dex_caches_) {
    // Need to use DecodeJObject so that we get null for cleared JNI weak globals.
    ObjPtr<mirror::DexCache> dex_cache = ObjPtr<mirror::DexCache>::DownCast(
        self->DecodeJObject(it.second.weak_root));
    if (dex_cache != nullptr) {
      visitor->Visit(dex_cache);
    }
  }
}

void ClassLinker::VisitAllocators(AllocatorVisitor* visitor) const {
  for (const ClassLoaderData& data : class_loaders_) {
    LinearAlloc* alloc = data.allocator;
    if (alloc != nullptr && !visitor->Visit(alloc)) {
      break;
    }
  }
}

void ClassLinker::InsertDexFileInToClassLoader(ObjPtr<mirror::Object> dex_file,
                                               ObjPtr<mirror::ClassLoader> class_loader) {
  DCHECK(dex_file != nullptr);
  Thread* const self = Thread::Current();
  WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
  ClassTable* const table = ClassTableForClassLoader(class_loader);
  DCHECK(table != nullptr);
  if (table->InsertStrongRoot(dex_file)) {
    WriteBarrierOnClassLoaderLocked(class_loader, dex_file);
  } else {
    // Write-barrier not required if strong-root isn't inserted.
  }
}

void ClassLinker::CleanupClassLoaders() {
  Thread* const self = Thread::Current();
  std::list<ClassLoaderData> to_delete;
  // Do the delete outside the lock to avoid lock violation in jit code cache.
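  // Rough phases of this function (summary comment, not part of the original
  // source): (1) under the classlinker lock, splice out loaders whose weak roots
  // were cleared; (2) under the dex lock, erase their dex caches and remember
  // executable oat files; (3) run CHA cleanup; (4) delete allocators and class
  // tables; (5) notify the fault handler and free startup dex caches.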
  {
    WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
    for (auto it = class_loaders_.begin(); it != class_loaders_.end(); ) {
      auto this_it = it;
      ++it;
      const ClassLoaderData& data = *this_it;
      // Need to use DecodeJObject so that we get null for cleared JNI weak globals.
      ObjPtr<mirror::ClassLoader> class_loader =
          ObjPtr<mirror::ClassLoader>::DownCast(self->DecodeJObject(data.weak_root));
      if (class_loader == nullptr) {
        VLOG(class_linker) << "Freeing class loader";
        to_delete.splice(to_delete.end(), class_loaders_, this_it);
      }
    }
  }
  if (to_delete.empty()) {
    return;
  }
  std::set<const OatFile*> unregistered_oat_files;
  JavaVMExt* vm = self->GetJniEnv()->GetVm();
  {
    WriterMutexLock mu(self, *Locks::dex_lock_);
    for (auto it = dex_caches_.begin(), end = dex_caches_.end(); it != end; ) {
      const DexFile* dex_file = it->first;
      const DexCacheData& data = it->second;
      if (self->DecodeJObject(data.weak_root) == nullptr) {
        DCHECK(to_delete.end() != std::find_if(
            to_delete.begin(),
            to_delete.end(),
            [&](const ClassLoaderData& cld) { return cld.class_table == data.class_table; }));
        if (dex_file->GetOatDexFile() != nullptr &&
            dex_file->GetOatDexFile()->GetOatFile() != nullptr &&
            dex_file->GetOatDexFile()->GetOatFile()->IsExecutable()) {
          unregistered_oat_files.insert(dex_file->GetOatDexFile()->GetOatFile());
        }
        vm->DeleteWeakGlobalRef(self, data.weak_root);
        it = dex_caches_.erase(it);
      } else {
        ++it;
      }
    }
  }
  {
    ScopedDebugDisallowReadBarriers sddrb(self);
    for (ClassLoaderData& data : to_delete) {
      // CHA unloading analysis and SingleImplementation cleanups are required.
      PrepareToDeleteClassLoader(self, data, /*cleanup_cha=*/true);
    }
  }
  for (const ClassLoaderData& data : to_delete) {
    delete data.allocator;
    delete data.class_table;
  }
  Runtime* runtime = Runtime::Current();
  if (!unregistered_oat_files.empty()) {
    for (const OatFile* oat_file : unregistered_oat_files) {
      // Notify the fault handler about removal of the executable code range if needed.
      DCHECK(oat_file->IsExecutable());
      size_t exec_offset = oat_file->GetOatHeader().GetExecutableOffset();
      DCHECK_LE(exec_offset, oat_file->Size());
      size_t exec_size = oat_file->Size() - exec_offset;
      if (exec_size != 0u) {
        runtime->RemoveGeneratedCodeRange(oat_file->Begin() + exec_offset, exec_size);
      }
    }
  }

  if (runtime->GetStartupLinearAlloc() != nullptr) {
    // Because the startup linear alloc can contain dex cache arrays associated with
    // class loaders that got unloaded, we need to delete these arrays.
    StartupCompletedTask::DeleteStartupDexCaches(self, /* called_by_gc= */ true);
    DCHECK_EQ(runtime->GetStartupLinearAlloc(), nullptr);
  }
}
11054
11055 class ClassLinker::FindVirtualMethodHolderVisitor : public ClassVisitor {
11056 public:
FindVirtualMethodHolderVisitor(const ArtMethod * method,PointerSize pointer_size)11057 FindVirtualMethodHolderVisitor(const ArtMethod* method, PointerSize pointer_size)
11058 : method_(method),
11059 pointer_size_(pointer_size) {}
11060
operator ()(ObjPtr<mirror::Class> klass)11061 bool operator()(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) override {
11062 if (klass->GetVirtualMethodsSliceUnchecked(pointer_size_).Contains(method_)) {
11063 holder_ = klass;
11064 }
11065 // Return false to stop searching if holder_ is not null.
11066 return holder_ == nullptr;
11067 }
11068
11069 ObjPtr<mirror::Class> holder_ = nullptr;
11070 const ArtMethod* const method_;
11071 const PointerSize pointer_size_;
11072 };
11073
GetHoldingClassOfCopiedMethod(ArtMethod * method)11074 ObjPtr<mirror::Class> ClassLinker::GetHoldingClassOfCopiedMethod(ArtMethod* method) {
11075 ScopedTrace trace(__FUNCTION__); // Since this function is slow, have a trace to notify people.
11076 CHECK(method->IsCopied());
11077 FindVirtualMethodHolderVisitor visitor(method, image_pointer_size_);
11078 VisitClasses(&visitor);
11079 DCHECK(visitor.holder_ != nullptr);
11080 return visitor.holder_;
11081 }
11082
GetHoldingClassLoaderOfCopiedMethod(Thread * self,ArtMethod * method)11083 ObjPtr<mirror::ClassLoader> ClassLinker::GetHoldingClassLoaderOfCopiedMethod(Thread* self,
11084 ArtMethod* method) {
11085 // Note: `GetHoldingClassOfCopiedMethod(method)` is a lot more expensive than finding
11086 // the class loader, so we're using it only to verify the result in debug mode.
11087 CHECK(method->IsCopied());
11088 gc::Heap* heap = Runtime::Current()->GetHeap();
11089 // Check if the copied method is in the boot class path.
11090 if (heap->IsBootImageAddress(method) || GetAllocatorForClassLoader(nullptr)->Contains(method)) {
11091 DCHECK(GetHoldingClassOfCopiedMethod(method)->GetClassLoader() == nullptr);
11092 return nullptr;
11093 }
11094 // Check if the copied method is in an app image.
11095 // Note: Continuous spaces contain boot image spaces and app image spaces.
11096 // However, they are sorted by address, so boot images are not trivial to skip.
11097 ArrayRef<gc::space::ContinuousSpace* const> spaces(heap->GetContinuousSpaces());
11098 DCHECK_GE(spaces.size(), heap->GetBootImageSpaces().size());
11099 for (gc::space::ContinuousSpace* space : spaces) {
11100 if (space->IsImageSpace()) {
11101 gc::space::ImageSpace* image_space = space->AsImageSpace();
11102 size_t offset = reinterpret_cast<const uint8_t*>(method) - image_space->Begin();
11103 const ImageSection& methods_section = image_space->GetImageHeader().GetMethodsSection();
11104 if (offset - methods_section.Offset() < methods_section.Size()) {
11105 // Grab the class loader from the first non-BCP class in the app image class table.
11106 // Note: If we allow classes from arbitrary parent or library class loaders in app
11107 // images, this shall need to be updated to actually search for the exact class.
11108 const ImageSection& class_table_section =
11109 image_space->GetImageHeader().GetClassTableSection();
11110 CHECK_NE(class_table_section.Size(), 0u);
11111 const uint8_t* ptr = image_space->Begin() + class_table_section.Offset();
11112 size_t read_count = 0;
11113 ClassTable::ClassSet class_set(ptr, /*make_copy_of_data=*/ false, &read_count);
11114 CHECK(!class_set.empty());
11115 auto it = class_set.begin();
11116 // No read barrier needed for references to non-movable image classes.
11117 while ((*it).Read<kWithoutReadBarrier>()->IsBootStrapClassLoaded()) {
11118 ++it;
11119 CHECK(it != class_set.end());
11120 }
11121 ObjPtr<mirror::ClassLoader> class_loader =
11122 (*it).Read<kWithoutReadBarrier>()->GetClassLoader();
11123 DCHECK(GetHoldingClassOfCopiedMethod(method)->GetClassLoader() == class_loader);
11124 return class_loader;
11125 }
11126 }
11127 }
11128 // Otherwise, the method must be in one of the `LinearAlloc` memory areas.
11129 jweak result = nullptr;
11130 {
11131 ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
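// Find the `LinearAlloc` that contains `method`; its `ClassLoaderData` also holds
// a weak root to the owning class loader.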
11132 for (const ClassLoaderData& data : class_loaders_) {
11133 if (data.allocator->Contains(method)) {
11134 result = data.weak_root;
11135 break;
11136 }
11137 }
11138 }
11139 CHECK(result != nullptr) << "Did not find allocator holding the copied method: " << method
11140 << " " << method->PrettyMethod();
11141 // The `method` is alive, so the class loader must also be alive.
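// The `DownCast` is safe: the weak roots stored in `ClassLoaderData` always reference
// `ClassLoader` objects.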
11142 return ObjPtr<mirror::ClassLoader>::DownCast(
11143 Runtime::Current()->GetJavaVM()->DecodeWeakGlobalAsStrong(result));
11144 }
11145
11146 bool ClassLinker::DenyAccessBasedOnPublicSdk([[maybe_unused]] ArtMethod* art_method) const
11147 REQUIRES_SHARED(Locks::mutator_lock_) {
11148 // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
11149 LOG(FATAL) << "UNREACHABLE";
11150 UNREACHABLE();
11151 }
11152
11153 bool ClassLinker::DenyAccessBasedOnPublicSdk([[maybe_unused]] ArtField* art_field) const
11154 REQUIRES_SHARED(Locks::mutator_lock_) {
11155 // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
11156 LOG(FATAL) << "UNREACHABLE";
11157 UNREACHABLE();
11158 }
11159
11160 bool ClassLinker::DenyAccessBasedOnPublicSdk(
11161 [[maybe_unused]] std::string_view type_descriptor) const {
11162 // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
11163 LOG(FATAL) << "UNREACHABLE";
11164 UNREACHABLE();
11165 }
11166
11167 void ClassLinker::SetEnablePublicSdkChecks([[maybe_unused]] bool enabled) {
11168 // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
11169 LOG(FATAL) << "UNREACHABLE";
11170 UNREACHABLE();
11171 }
11172
11173 bool ClassLinker::TransactionWriteConstraint(
11174 [[maybe_unused]] Thread* self, [[maybe_unused]] ObjPtr<mirror::Object> obj) {
11175 // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
11176 LOG(FATAL) << "UNREACHABLE";
11177 UNREACHABLE();
11178 }
11179
11180 bool ClassLinker::TransactionWriteValueConstraint(
11181 [[maybe_unused]] Thread* self, [[maybe_unused]] ObjPtr<mirror::Object> value) {
11182 // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
11183 LOG(FATAL) << "UNREACHABLE";
11184 UNREACHABLE();
11185 }
11186
11187 bool ClassLinker::TransactionAllocationConstraint(
11188 [[maybe_unused]] Thread* self, [[maybe_unused]] ObjPtr<mirror::Class> klass) {
11189 // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
11190 LOG(FATAL) << "UNREACHABLE";
11191 UNREACHABLE();
11192 }
11193
11194 void ClassLinker::RecordWriteFieldBoolean([[maybe_unused]] mirror::Object* obj,
11195 [[maybe_unused]] MemberOffset field_offset,
11196 [[maybe_unused]] uint8_t value,
11197 [[maybe_unused]] bool is_volatile) {
11198 // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
11199 LOG(FATAL) << "UNREACHABLE";
11200 UNREACHABLE();
11201 }
11202
11203 void ClassLinker::RecordWriteFieldByte([[maybe_unused]] mirror::Object* obj,
11204 [[maybe_unused]] MemberOffset field_offset,
11205 [[maybe_unused]] int8_t value,
11206 [[maybe_unused]] bool is_volatile) {
11207 // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
11208 LOG(FATAL) << "UNREACHABLE";
11209 UNREACHABLE();
11210 }
11211
11212 void ClassLinker::RecordWriteFieldChar([[maybe_unused]] mirror::Object* obj,
11213 [[maybe_unused]] MemberOffset field_offset,
11214 [[maybe_unused]] uint16_t value,
11215 [[maybe_unused]] bool is_volatile) {
11216 // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
11217 LOG(FATAL) << "UNREACHABLE";
11218 UNREACHABLE();
11219 }
11220
11221 void ClassLinker::RecordWriteFieldShort([[maybe_unused]] mirror::Object* obj,
11222 [[maybe_unused]] MemberOffset field_offset,
11223 [[maybe_unused]] int16_t value,
11224 [[maybe_unused]] bool is_volatile) {
11225 // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
11226 LOG(FATAL) << "UNREACHABLE";
11227 UNREACHABLE();
11228 }
11229
11230 void ClassLinker::RecordWriteField32([[maybe_unused]] mirror::Object* obj,
11231 [[maybe_unused]] MemberOffset field_offset,
11232 [[maybe_unused]] uint32_t value,
11233 [[maybe_unused]] bool is_volatile) {
11234 // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
11235 LOG(FATAL) << "UNREACHABLE";
11236 UNREACHABLE();
11237 }
11238
11239 void ClassLinker::RecordWriteField64([[maybe_unused]] mirror::Object* obj,
11240 [[maybe_unused]] MemberOffset field_offset,
11241 [[maybe_unused]] uint64_t value,
11242 [[maybe_unused]] bool is_volatile) {
11243 // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
11244 LOG(FATAL) << "UNREACHABLE";
11245 UNREACHABLE();
11246 }
11247
11248 void ClassLinker::RecordWriteFieldReference([[maybe_unused]] mirror::Object* obj,
11249 [[maybe_unused]] MemberOffset field_offset,
11250 [[maybe_unused]] ObjPtr<mirror::Object> value,
11251 [[maybe_unused]] bool is_volatile) {
11252 // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
11253 LOG(FATAL) << "UNREACHABLE";
11254 UNREACHABLE();
11255 }
11256
11257 void ClassLinker::RecordWriteArray([[maybe_unused]] mirror::Array* array,
11258 [[maybe_unused]] size_t index,
11259 [[maybe_unused]] uint64_t value) {
11260 // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
11261 LOG(FATAL) << "UNREACHABLE";
11262 UNREACHABLE();
11263 }
11264
11265 void ClassLinker::RecordStrongStringInsertion([[maybe_unused]] ObjPtr<mirror::String> s) {
11266 // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
11267 LOG(FATAL) << "UNREACHABLE";
11268 UNREACHABLE();
11269 }
11270
11271 void ClassLinker::RecordWeakStringInsertion([[maybe_unused]] ObjPtr<mirror::String> s) {
11272 // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
11273 LOG(FATAL) << "UNREACHABLE";
11274 UNREACHABLE();
11275 }
11276
11277 void ClassLinker::RecordStrongStringRemoval([[maybe_unused]] ObjPtr<mirror::String> s) {
11278 // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
11279 LOG(FATAL) << "UNREACHABLE";
11280 UNREACHABLE();
11281 }
11282
11283 void ClassLinker::RecordWeakStringRemoval([[maybe_unused]] ObjPtr<mirror::String> s) {
11284 // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
11285 LOG(FATAL) << "UNREACHABLE";
11286 UNREACHABLE();
11287 }
11288
11289 void ClassLinker::RecordResolveString([[maybe_unused]] ObjPtr<mirror::DexCache> dex_cache,
11290 [[maybe_unused]] dex::StringIndex string_idx) {
11291 // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
11292 LOG(FATAL) << "UNREACHABLE";
11293 UNREACHABLE();
11294 }
11295
11296 void ClassLinker::RecordResolveMethodType([[maybe_unused]] ObjPtr<mirror::DexCache> dex_cache,
11297 [[maybe_unused]] dex::ProtoIndex proto_idx) {
11298 // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
11299 LOG(FATAL) << "UNREACHABLE";
11300 UNREACHABLE();
11301 }
11302
11303 void ClassLinker::ThrowTransactionAbortError([[maybe_unused]] Thread* self) {
11304 // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
11305 LOG(FATAL) << "UNREACHABLE";
11306 UNREACHABLE();
11307 }
11308
11309 void ClassLinker::AbortTransactionF(
11310 [[maybe_unused]] Thread* self, [[maybe_unused]] const char* fmt, ...) {
11311 // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
11312 LOG(FATAL) << "UNREACHABLE";
11313 UNREACHABLE();
11314 }
11315
11316 void ClassLinker::AbortTransactionV([[maybe_unused]] Thread* self,
11317 [[maybe_unused]] const char* fmt,
11318 [[maybe_unused]] va_list args) {
11319 // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
11320 LOG(FATAL) << "UNREACHABLE";
11321 UNREACHABLE();
11322 }
11323
11324 bool ClassLinker::IsTransactionAborted() const {
11325 // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
11326 LOG(FATAL) << "UNREACHABLE";
11327 UNREACHABLE();
11328 }
11329
11330 void ClassLinker::VisitTransactionRoots([[maybe_unused]] RootVisitor* visitor) {
11331 // Nothing to do for normal `ClassLinker`, only `AotClassLinker` handles transactions.
11332 }
11333
11334 void ClassLinker::RemoveDexFromCaches(const DexFile& dex_file) {
11335 ReaderMutexLock mu(Thread::Current(), *Locks::dex_lock_);
11336
11337 auto it = dex_caches_.find(&dex_file);
11338 if (it != dex_caches_.end()) {
11339 dex_caches_.erase(it);
11340 }
11341 }
11342
11343 // GetClassLoadersVisitor collects all visited class loaders into a handle scope, keeping them rooted for the caller.
11344 class GetClassLoadersVisitor : public ClassLoaderVisitor {
11345 public:
11346 explicit GetClassLoadersVisitor(VariableSizedHandleScope* class_loaders)
11347 : class_loaders_(class_loaders) {}
11348
11349 void Visit(ObjPtr<mirror::ClassLoader> class_loader)
11350 REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
11351 DCHECK(class_loader != nullptr);
11352 class_loaders_->NewHandle(class_loader);
11353 }
11354
11355 private:
11356 VariableSizedHandleScope* const class_loaders_;
11357 };
11358
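// Snapshot all registered class loaders into `handles` while holding the classes lock.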
11359 void ClassLinker::GetClassLoaders(Thread* self, VariableSizedHandleScope* handles) {
11360 GetClassLoadersVisitor class_loader_visitor(handles);
11361 ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
11362 VisitClassLoaders(&class_loader_visitor);
11363 }
11364
11365 // Explicitly instantiate ClassLinker::AllocClass for movable and non-movable classes,
// as the template definition is only visible in this translation unit.
11366 template ObjPtr<mirror::Class> ClassLinker::AllocClass</* kMovable= */ true>(
11367 Thread* self,
11368 ObjPtr<mirror::Class> java_lang_Class,
11369 uint32_t class_size);
11370 template ObjPtr<mirror::Class> ClassLinker::AllocClass</* kMovable= */ false>(
11371 Thread* self,
11372 ObjPtr<mirror::Class> java_lang_Class,
11373 uint32_t class_size);
11374
11375 } // namespace art
11376