/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "image_writer.h"

#include <sys/stat.h>
#include <lz4.h>
#include <lz4hc.h>

#include <memory>
#include <numeric>
#include <unordered_set>
#include <vector>

#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/logging.h"
#include "base/unix_file/fd_file.h"
#include "class_linker-inl.h"
#include "compiled_method.h"
#include "dex_file-inl.h"
#include "dex_file_types.h"
#include "driver/compiler_driver.h"
#include "elf_file.h"
#include "elf_utils.h"
#include "elf_writer.h"
#include "gc/accounting/card_table-inl.h"
#include "gc/accounting/heap_bitmap.h"
#include "gc/accounting/space_bitmap-inl.h"
#include "gc/collector/concurrent_copying.h"
#include "gc/heap.h"
#include "gc/space/large_object_space.h"
#include "gc/space/space-inl.h"
#include "globals.h"
#include "image.h"
#include "imt_conflict_table.h"
#include "intern_table.h"
#include "jni_internal.h"
#include "linear_alloc.h"
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "mirror/class_ext.h"
#include "mirror/class_loader.h"
#include "mirror/dex_cache.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/executable.h"
#include "mirror/method.h"
#include "mirror/object-inl.h"
#include "mirror/object-refvisitor-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/string-inl.h"
#include "oat.h"
#include "oat_file.h"
#include "oat_file_manager.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "handle_scope-inl.h"
#include "utils/dex_cache_arrays_layout-inl.h"

using ::art::mirror::Class;
using ::art::mirror::DexCache;
using ::art::mirror::Object;
using ::art::mirror::ObjectArray;
using ::art::mirror::String;

namespace art {

// Separate objects into multiple bins to optimize dirty memory use.
static constexpr bool kBinObjects = true;

// Return true if an object is already in an image space.
bool ImageWriter::IsInBootImage(const void* obj) const {
  gc::Heap* const heap = Runtime::Current()->GetHeap();
  if (!compile_app_image_) {
    DCHECK(heap->GetBootImageSpaces().empty());
    return false;
  }
  for (gc::space::ImageSpace* boot_image_space : heap->GetBootImageSpaces()) {
    const uint8_t* image_begin = boot_image_space->Begin();
    // Real image end including ArtMethods and ArtField sections.
    const uint8_t* image_end = image_begin + boot_image_space->GetImageHeader().GetImageSize();
    if (image_begin <= obj && obj < image_end) {
      return true;
    }
  }
  return false;
}

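// Return true if a pointer lies within the oat file region of any boot image space.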
bool ImageWriter::IsInBootOatFile(const void* ptr) const {
  gc::Heap* const heap = Runtime::Current()->GetHeap();
  if (!compile_app_image_) {
    DCHECK(heap->GetBootImageSpaces().empty());
    return false;
  }
  for (gc::space::ImageSpace* boot_image_space : heap->GetBootImageSpaces()) {
    const ImageHeader& image_header = boot_image_space->GetImageHeader();
    if (image_header.GetOatFileBegin() <= ptr && ptr < image_header.GetOatFileEnd()) {
      return true;
    }
  }
  return false;
}

static void ClearDexFileCookieCallback(Object* obj, void* arg ATTRIBUTE_UNUSED)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(obj != nullptr);
  Class* klass = obj->GetClass();
  if (klass == WellKnownClasses::ToClass(WellKnownClasses::dalvik_system_DexFile)) {
    ArtField* field = jni::DecodeArtField(WellKnownClasses::dalvik_system_DexFile_cookie);
    // Null out the cookie to enable determinism. b/34090128
    field->SetObject</*kTransactionActive*/false>(obj, nullptr);
  }
}

static void ClearDexFileCookies() REQUIRES_SHARED(Locks::mutator_lock_) {
  Runtime::Current()->GetHeap()->VisitObjects(ClearDexFileCookieCallback, nullptr);
}

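// Prepare the heap for image writing: prune classes that do not belong in the image, clear dex
// file cookies for app images, collect garbage, assign object offsets, and allocate the image
// memory.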
bool ImageWriter::PrepareImageAddressSpace() {
  target_ptr_size_ = InstructionSetPointerSize(compiler_driver_.GetInstructionSet());
  gc::Heap* const heap = Runtime::Current()->GetHeap();
  {
    ScopedObjectAccess soa(Thread::Current());
    PruneNonImageClasses();  // Remove junk
    if (compile_app_image_) {
      // Clear dex file cookies for app images to enable app image determinism. This is required
      // since the cookie field contains long pointers to DexFiles which are not deterministic.
      // b/34090128
      ClearDexFileCookies();
    } else {
      // Avoid for app image since this may increase RAM and image size.
      ComputeLazyFieldsForImageClasses();  // Add useful information
    }
  }
  heap->CollectGarbage(false);  // Remove garbage.

  if (kIsDebugBuild) {
    ScopedObjectAccess soa(Thread::Current());
    CheckNonImageClassesRemoved();
  }

  {
    ScopedObjectAccess soa(Thread::Current());
    CalculateNewObjectOffsets();
  }

  // This needs to happen after CalculateNewObjectOffsets since it relies on intern_table_bytes_ and
  // bin size sums being calculated.
  if (!AllocMemory()) {
    return false;
  }

  return true;
}

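// Write out each image file: the (optionally LZ4-compressed) object data, then the page-aligned
// image bitmap, and the header last so that a partially written image never carries a valid
// header.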
bool ImageWriter::Write(int image_fd,
                        const std::vector<const char*>& image_filenames,
                        const std::vector<const char*>& oat_filenames) {
  // If image_fd or oat_fd are not kInvalidFd then we may have empty strings in image_filenames or
  // oat_filenames.
  CHECK(!image_filenames.empty());
  if (image_fd != kInvalidFd) {
    CHECK_EQ(image_filenames.size(), 1u);
  }
  CHECK(!oat_filenames.empty());
  CHECK_EQ(image_filenames.size(), oat_filenames.size());

  {
    ScopedObjectAccess soa(Thread::Current());
    for (size_t i = 0; i < oat_filenames.size(); ++i) {
      CreateHeader(i);
      CopyAndFixupNativeData(i);
    }
  }

  {
    // TODO: heap validation can't handle these fix up passes.
    ScopedObjectAccess soa(Thread::Current());
    Runtime::Current()->GetHeap()->DisableObjectValidation();
    CopyAndFixupObjects();
  }

  for (size_t i = 0; i < image_filenames.size(); ++i) {
    const char* image_filename = image_filenames[i];
    ImageInfo& image_info = GetImageInfo(i);
    std::unique_ptr<File> image_file;
    if (image_fd != kInvalidFd) {
      if (strlen(image_filename) == 0u) {
        image_file.reset(new File(image_fd, unix_file::kCheckSafeUsage));
        // Empty the file in case it already exists.
        if (image_file != nullptr) {
          TEMP_FAILURE_RETRY(image_file->SetLength(0));
          TEMP_FAILURE_RETRY(image_file->Flush());
        }
      } else {
        LOG(ERROR) << "image fd " << image_fd << " name " << image_filename;
      }
    } else {
      image_file.reset(OS::CreateEmptyFile(image_filename));
    }

    if (image_file == nullptr) {
      LOG(ERROR) << "Failed to open image file " << image_filename;
      return false;
    }

    if (!compile_app_image_ && fchmod(image_file->Fd(), 0644) != 0) {
      PLOG(ERROR) << "Failed to make image file world readable: " << image_filename;
      image_file->Erase();
      return false;
    }

    std::unique_ptr<char[]> compressed_data;
    // Image data size excludes the bitmap and the header.
    ImageHeader* const image_header = reinterpret_cast<ImageHeader*>(image_info.image_->Begin());
    const size_t image_data_size = image_header->GetImageSize() - sizeof(ImageHeader);
    char* image_data = reinterpret_cast<char*>(image_info.image_->Begin()) + sizeof(ImageHeader);
    size_t data_size;
    const char* image_data_to_write;
    const uint64_t compress_start_time = NanoTime();

    CHECK_EQ(image_header->storage_mode_, image_storage_mode_);
    switch (image_storage_mode_) {
      case ImageHeader::kStorageModeLZ4HC:  // Fall-through.
      case ImageHeader::kStorageModeLZ4: {
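        // LZ4_compressBound() returns the worst-case compressed size for the given input size,
        // so the buffer allocated below can never overflow. LZ4_compress_default() returns the
        // number of bytes written to the destination, or 0 on failure.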
        const size_t compressed_max_size = LZ4_compressBound(image_data_size);
        compressed_data.reset(new char[compressed_max_size]);
        data_size = LZ4_compress_default(
            reinterpret_cast<char*>(image_info.image_->Begin()) + sizeof(ImageHeader),
            &compressed_data[0],
            image_data_size,
            compressed_max_size);

        break;
      }
      /*
       * Disabled due to image_test64 flakiness. Both use the same decompression. b/27560444
      case ImageHeader::kStorageModeLZ4HC: {
        // Bound is the same as for non-HC.
        const size_t compressed_max_size = LZ4_compressBound(image_data_size);
        compressed_data.reset(new char[compressed_max_size]);
        data_size = LZ4_compressHC(
            reinterpret_cast<char*>(image_info.image_->Begin()) + sizeof(ImageHeader),
            &compressed_data[0],
            image_data_size);
        break;
      }
      */
      case ImageHeader::kStorageModeUncompressed: {
        data_size = image_data_size;
        image_data_to_write = image_data;
        break;
      }
      default: {
        LOG(FATAL) << "Unsupported";
        UNREACHABLE();
      }
    }

    if (compressed_data != nullptr) {
      image_data_to_write = &compressed_data[0];
      VLOG(compiler) << "Compressed from " << image_data_size << " to " << data_size << " in "
                     << PrettyDuration(NanoTime() - compress_start_time);
      if (kIsDebugBuild) {
        std::unique_ptr<uint8_t[]> temp(new uint8_t[image_data_size]);
        const size_t decompressed_size = LZ4_decompress_safe(
            reinterpret_cast<char*>(&compressed_data[0]),
            reinterpret_cast<char*>(&temp[0]),
            data_size,
            image_data_size);
        CHECK_EQ(decompressed_size, image_data_size);
        CHECK_EQ(memcmp(image_data, &temp[0], image_data_size), 0) << image_storage_mode_;
      }
    }

    // Write out the image + fields + methods.
    const bool is_compressed = compressed_data != nullptr;
    if (!image_file->PwriteFully(image_data_to_write, data_size, sizeof(ImageHeader))) {
      PLOG(ERROR) << "Failed to write image file data " << image_filename;
      image_file->Erase();
      return false;
    }

    // Write out the image bitmap at the page aligned start of the image end, also uncompressed for
    // convenience.
    const ImageSection& bitmap_section = image_header->GetImageSection(
        ImageHeader::kSectionImageBitmap);
    // Align up since data size may be unaligned if the image is compressed.
    size_t bitmap_position_in_file = RoundUp(sizeof(ImageHeader) + data_size, kPageSize);
    if (!is_compressed) {
      CHECK_EQ(bitmap_position_in_file, bitmap_section.Offset());
    }
    if (!image_file->PwriteFully(reinterpret_cast<char*>(image_info.image_bitmap_->Begin()),
                                 bitmap_section.Size(),
                                 bitmap_position_in_file)) {
      PLOG(ERROR) << "Failed to write image file " << image_filename;
      image_file->Erase();
      return false;
    }

    int err = image_file->Flush();
    if (err < 0) {
      PLOG(ERROR) << "Failed to flush image file " << image_filename << " with result " << err;
      image_file->Erase();
      return false;
    }

    // Write header last in case the compiler gets killed in the middle of image writing.
    // We do not want to have a corrupted image with a valid header.
    // The header is uncompressed since it contains whether the image is compressed or not.
    image_header->data_size_ = data_size;
    if (!image_file->PwriteFully(reinterpret_cast<char*>(image_info.image_->Begin()),
                                 sizeof(ImageHeader),
                                 0)) {
      PLOG(ERROR) << "Failed to write image file header " << image_filename;
      image_file->Erase();
      return false;
    }

    CHECK_EQ(bitmap_position_in_file + bitmap_section.Size(),
             static_cast<size_t>(image_file->GetLength()));
    if (image_file->FlushCloseOrErase() != 0) {
      PLOG(ERROR) << "Failed to flush and close image file " << image_filename;
      return false;
    }
  }
  return true;
}

void ImageWriter::SetImageOffset(mirror::Object* object, size_t offset) {
  DCHECK(object != nullptr);
  DCHECK_NE(offset, 0U);

  // The object is already deflated from when we set the bin slot. Just overwrite the lock word.
  object->SetLockWord(LockWord::FromForwardingAddress(offset), false);
  DCHECK_EQ(object->GetLockWord(false).ReadBarrierState(), 0u);
  DCHECK(IsImageOffsetAssigned(object));
}

void ImageWriter::UpdateImageOffset(mirror::Object* obj, uintptr_t offset) {
  DCHECK(IsImageOffsetAssigned(obj)) << obj << " " << offset;
  obj->SetLockWord(LockWord::FromForwardingAddress(offset), false);
  DCHECK_EQ(obj->GetLockWord(false).ReadBarrierState(), 0u);
}

void ImageWriter::AssignImageOffset(mirror::Object* object, ImageWriter::BinSlot bin_slot) {
  DCHECK(object != nullptr);
  DCHECK_NE(image_objects_offset_begin_, 0u);

  size_t oat_index = GetOatIndex(object);
  ImageInfo& image_info = GetImageInfo(oat_index);
  size_t bin_slot_offset = image_info.bin_slot_offsets_[bin_slot.GetBin()];
  size_t new_offset = bin_slot_offset + bin_slot.GetIndex();
  DCHECK_ALIGNED(new_offset, kObjectAlignment);

  SetImageOffset(object, new_offset);
  DCHECK_LT(new_offset, image_info.image_end_);
}

bool ImageWriter::IsImageOffsetAssigned(mirror::Object* object) const {
  // Will also return true if the bin slot was assigned since we are reusing the lock word.
  DCHECK(object != nullptr);
  return object->GetLockWord(false).GetState() == LockWord::kForwardingAddress;
}

size_t ImageWriter::GetImageOffset(mirror::Object* object) const {
  DCHECK(object != nullptr);
  DCHECK(IsImageOffsetAssigned(object));
  LockWord lock_word = object->GetLockWord(false);
  size_t offset = lock_word.ForwardingAddress();
  size_t oat_index = GetOatIndex(object);
  const ImageInfo& image_info = GetImageInfo(oat_index);
  DCHECK_LT(offset, image_info.image_end_);
  return offset;
}

void ImageWriter::SetImageBinSlot(mirror::Object* object, BinSlot bin_slot) {
  DCHECK(object != nullptr);
  DCHECK(!IsImageOffsetAssigned(object));
  DCHECK(!IsImageBinSlotAssigned(object));

  // Before we stomp over the lock word, save the hash code for later.
  LockWord lw(object->GetLockWord(false));
  switch (lw.GetState()) {
    case LockWord::kFatLocked: {
      LOG(FATAL) << "Fat locked object " << object << " found during object copy";
      break;
    }
    case LockWord::kThinLocked: {
      LOG(FATAL) << "Thin locked object " << object << " found during object copy";
      break;
    }
    case LockWord::kUnlocked:
      // No hash, don't need to save it.
      break;
    case LockWord::kHashCode:
      DCHECK(saved_hashcode_map_.find(object) == saved_hashcode_map_.end());
      saved_hashcode_map_.emplace(object, lw.GetHashCode());
      break;
    default:
      LOG(FATAL) << "Unreachable.";
      UNREACHABLE();
  }
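  // Stash the bin slot in the lock word's forwarding-address state. BinSlot packs the bin kind
  // and the byte offset within that bin into a single 32-bit value; AssignImageOffset() later
  // replaces it with the final image offset.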
  object->SetLockWord(LockWord::FromForwardingAddress(bin_slot.Uint32Value()), false);
  DCHECK_EQ(object->GetLockWord(false).ReadBarrierState(), 0u);
  DCHECK(IsImageBinSlotAssigned(object));
}

void ImageWriter::PrepareDexCacheArraySlots() {
  // Prepare dex cache array starts based on the ordering specified in the CompilerDriver.
  // Set the slot size early to avoid DCHECK() failures in IsImageBinSlotAssigned()
  // when AssignImageBinSlot() assigns their indexes out of order.
  for (const DexFile* dex_file : compiler_driver_.GetDexFilesForOatFile()) {
    auto it = dex_file_oat_index_map_.find(dex_file);
    DCHECK(it != dex_file_oat_index_map_.end()) << dex_file->GetLocation();
    ImageInfo& image_info = GetImageInfo(it->second);
    image_info.dex_cache_array_starts_.Put(dex_file, image_info.bin_slot_sizes_[kBinDexCacheArray]);
    DexCacheArraysLayout layout(target_ptr_size_, dex_file);
    image_info.bin_slot_sizes_[kBinDexCacheArray] += layout.Size();
  }

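  // Record a native object relocation for each dex cache array (types, methods, fields, strings
  // and, if present, method types) at its pre-calculated start offset.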
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Thread* const self = Thread::Current();
  ReaderMutexLock mu(self, *Locks::dex_lock_);
  for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
    ObjPtr<mirror::DexCache> dex_cache =
        ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
    if (dex_cache == nullptr || IsInBootImage(dex_cache.Ptr())) {
      continue;
    }
    const DexFile* dex_file = dex_cache->GetDexFile();
    CHECK(dex_file_oat_index_map_.find(dex_file) != dex_file_oat_index_map_.end())
        << "Dex cache should have been pruned " << dex_file->GetLocation()
        << "; possibly in class path";
    DexCacheArraysLayout layout(target_ptr_size_, dex_file);
    DCHECK(layout.Valid());
    size_t oat_index = GetOatIndexForDexCache(dex_cache);
    ImageInfo& image_info = GetImageInfo(oat_index);
    uint32_t start = image_info.dex_cache_array_starts_.Get(dex_file);
    DCHECK_EQ(dex_file->NumTypeIds() != 0u, dex_cache->GetResolvedTypes() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetResolvedTypes(),
                               start + layout.TypesOffset(),
                               dex_cache);
    DCHECK_EQ(dex_file->NumMethodIds() != 0u, dex_cache->GetResolvedMethods() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetResolvedMethods(),
                               start + layout.MethodsOffset(),
                               dex_cache);
    DCHECK_EQ(dex_file->NumFieldIds() != 0u, dex_cache->GetResolvedFields() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetResolvedFields(),
                               start + layout.FieldsOffset(),
                               dex_cache);
    DCHECK_EQ(dex_file->NumStringIds() != 0u, dex_cache->GetStrings() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetStrings(), start + layout.StringsOffset(), dex_cache);

    if (dex_cache->GetResolvedMethodTypes() != nullptr) {
      AddDexCacheArrayRelocation(dex_cache->GetResolvedMethodTypes(),
                                 start + layout.MethodTypesOffset(),
                                 dex_cache);
    }
  }
}

void ImageWriter::AddDexCacheArrayRelocation(void* array,
                                             size_t offset,
                                             ObjPtr<mirror::DexCache> dex_cache) {
  if (array != nullptr) {
    DCHECK(!IsInBootImage(array));
    size_t oat_index = GetOatIndexForDexCache(dex_cache);
    native_object_relocations_.emplace(array,
        NativeObjectRelocation { oat_index, offset, kNativeObjectRelocationTypeDexCacheArray });
  }
}

void ImageWriter::AddMethodPointerArray(mirror::PointerArray* arr) {
  DCHECK(arr != nullptr);
  if (kIsDebugBuild) {
    for (size_t i = 0, len = arr->GetLength(); i < len; i++) {
      ArtMethod* method = arr->GetElementPtrSize<ArtMethod*>(i, target_ptr_size_);
      if (method != nullptr && !method->IsRuntimeMethod()) {
        mirror::Class* klass = method->GetDeclaringClass();
        CHECK(klass == nullptr || KeepClass(klass))
            << Class::PrettyClass(klass) << " should be a kept class";
      }
    }
  }
  // kBinArtMethodClean picked arbitrarily, just required to differentiate between ArtFields and
  // ArtMethods.
  pointer_arrays_.emplace(arr, kBinArtMethodClean);
}

void ImageWriter::AssignImageBinSlot(mirror::Object* object, size_t oat_index) {
  DCHECK(object != nullptr);
  size_t object_size = object->SizeOf();

  // The magic happens here. We segregate objects into different bins based
  // on how likely they are to get dirty at runtime.
  //
  // Likely-to-dirty objects get packed together into the same bin so that
  // at runtime their page dirtiness ratio (how many dirty objects a page has) is
  // maximized.
  //
  // This means more pages will stay either clean or shared dirty (with zygote) and
  // the app will use less of its own (private) memory.
  Bin bin = kBinRegular;
  size_t current_offset = 0u;

  if (kBinObjects) {
    //
    // Changing the bin of an object is purely a memory-use tuning.
    // It has no effect on runtime correctness.
    //
    // Memory analysis has determined that the following types of objects get dirtied
    // the most:
    //
    // * Dex cache arrays are stored in a special bin. The arrays for each dex cache have
    //   a fixed layout which helps improve generated code (using PC-relative addressing),
    //   so we pre-calculate their offsets separately in PrepareDexCacheArraySlots().
    //   Since these arrays are huge, most pages do not overlap other objects and it's not
    //   really important where they are for the clean/dirty separation. Due to their
    //   special PC-relative addressing, we arbitrarily keep them at the end.
    // * Classes which are verified [their clinit runs only at runtime]
    //   - classes in general [because their static fields get overwritten]
    //   - initialized classes with all-final statics are unlikely to be ever dirty,
    //     so bin them separately
    // * Art Methods that are:
    //   - native [their native entry point is not looked up until runtime]
    //   - have declaring classes that aren't initialized
    //            [their interpreter/quick entry points are trampolines until the class
    //             becomes initialized]
    //
    // We also assume the following objects get dirtied either never or extremely rarely:
    //  * Strings (they are immutable)
    //  * Art methods that aren't native and have initialized declaring classes
    //
    // We assume that "regular" bin objects are highly unlikely to become dirtied,
    // so packing them together will not result in a noticeably tighter dirty-to-clean ratio.
    //
    if (object->IsClass()) {
      bin = kBinClassVerified;
      mirror::Class* klass = object->AsClass();

      // Add non-embedded vtable to the pointer array table if there is one.
      auto* vtable = klass->GetVTable();
      if (vtable != nullptr) {
        AddMethodPointerArray(vtable);
      }
      auto* iftable = klass->GetIfTable();
      if (iftable != nullptr) {
        for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
          if (iftable->GetMethodArrayCount(i) > 0) {
            AddMethodPointerArray(iftable->GetMethodArray(i));
          }
        }
      }

      if (klass->GetStatus() == Class::kStatusInitialized) {
        bin = kBinClassInitialized;

        // If the class's static fields are all final, put it into a separate bin
        // since it's very likely it will stay clean.
        uint32_t num_static_fields = klass->NumStaticFields();
        if (num_static_fields == 0) {
          bin = kBinClassInitializedFinalStatics;
        } else {
          // Maybe all the statics are final?
          bool all_final = true;
          for (uint32_t i = 0; i < num_static_fields; ++i) {
            ArtField* field = klass->GetStaticField(i);
            if (!field->IsFinal()) {
              all_final = false;
              break;
            }
          }

          if (all_final) {
            bin = kBinClassInitializedFinalStatics;
          }
        }
      }
    } else if (object->GetClass<kVerifyNone>()->IsStringClass()) {
      bin = kBinString;  // Strings are almost always immutable (except for object header).
    } else if (object->GetClass<kVerifyNone>() ==
        Runtime::Current()->GetClassLinker()->GetClassRoot(ClassLinker::kJavaLangObject)) {
      // Instance of java.lang.Object, probably a lock object. This means it will be dirty when we
      // synchronize on it.
      bin = kBinMiscDirty;
    } else if (object->IsDexCache()) {
      // Dex file field becomes dirty when the image is loaded.
      bin = kBinMiscDirty;
    }
    // else bin = kBinRegular
  }

  // Assign the oat index too.
  DCHECK(oat_index_map_.find(object) == oat_index_map_.end());
  oat_index_map_.emplace(object, oat_index);

  ImageInfo& image_info = GetImageInfo(oat_index);

  size_t offset_delta = RoundUp(object_size, kObjectAlignment);  // 64-bit alignment
  current_offset = image_info.bin_slot_sizes_[bin];  // How many bytes the current bin is at (aligned).
  // Move the current bin size up to accommodate the object we just assigned a bin slot.
  image_info.bin_slot_sizes_[bin] += offset_delta;

  BinSlot new_bin_slot(bin, current_offset);
  SetImageBinSlot(object, new_bin_slot);

  ++image_info.bin_slot_count_[bin];

  // Grow the image closer to the end by the object we just assigned.
  image_info.image_end_ += offset_delta;
}

bool ImageWriter::WillMethodBeDirty(ArtMethod* m) const {
  if (m->IsNative()) {
    return true;
  }
  mirror::Class* declaring_class = m->GetDeclaringClass();
  // A method of an initialized class is highly unlikely to be dirtied since there are no entry
  // points to mutate.
  return declaring_class == nullptr || declaring_class->GetStatus() != Class::kStatusInitialized;
}

bool ImageWriter::IsImageBinSlotAssigned(mirror::Object* object) const {
  DCHECK(object != nullptr);

  // We always stash the bin slot into a lockword, in the 'forwarding address' state.
  // If it's in some other state, then we haven't yet assigned an image bin slot.
  if (object->GetLockWord(false).GetState() != LockWord::kForwardingAddress) {
    return false;
  } else if (kIsDebugBuild) {
    LockWord lock_word = object->GetLockWord(false);
    size_t offset = lock_word.ForwardingAddress();
    BinSlot bin_slot(offset);
    size_t oat_index = GetOatIndex(object);
    const ImageInfo& image_info = GetImageInfo(oat_index);
    DCHECK_LT(bin_slot.GetIndex(), image_info.bin_slot_sizes_[bin_slot.GetBin()])
        << "bin slot offset should not exceed the size of that bin";
  }
  return true;
}

ImageWriter::BinSlot ImageWriter::GetImageBinSlot(mirror::Object* object) const {
  DCHECK(object != nullptr);
  DCHECK(IsImageBinSlotAssigned(object));

  LockWord lock_word = object->GetLockWord(false);
  size_t offset = lock_word.ForwardingAddress();  // TODO: ForwardingAddress should be uint32_t
  DCHECK_LE(offset, std::numeric_limits<uint32_t>::max());

  BinSlot bin_slot(static_cast<uint32_t>(offset));
  size_t oat_index = GetOatIndex(object);
  const ImageInfo& image_info = GetImageInfo(oat_index);
  DCHECK_LT(bin_slot.GetIndex(), image_info.bin_slot_sizes_[bin_slot.GetBin()]);

  return bin_slot;
}

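// Allocate the anonymous memory backing each image and a live bitmap covering its mirror object
// section.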
bool ImageWriter::AllocMemory() {
  for (ImageInfo& image_info : image_infos_) {
    ImageSection unused_sections[ImageHeader::kSectionCount];
    const size_t length = RoundUp(
        image_info.CreateImageSections(unused_sections), kPageSize);

    std::string error_msg;
    image_info.image_.reset(MemMap::MapAnonymous("image writer image",
                                                 nullptr,
                                                 length,
                                                 PROT_READ | PROT_WRITE,
                                                 false,
                                                 false,
                                                 &error_msg));
    if (UNLIKELY(image_info.image_.get() == nullptr)) {
      LOG(ERROR) << "Failed to allocate memory for image file generation: " << error_msg;
      return false;
    }

    // Create the image bitmap, only needs to cover mirror object section which is up to image_end_.
    CHECK_LE(image_info.image_end_, length);
    image_info.image_bitmap_.reset(gc::accounting::ContinuousSpaceBitmap::Create(
        "image bitmap", image_info.image_->Begin(), RoundUp(image_info.image_end_, kPageSize)));
    if (image_info.image_bitmap_.get() == nullptr) {
      LOG(ERROR) << "Failed to allocate memory for image bitmap";
      return false;
    }
  }
  return true;
}

class ImageWriter::ComputeLazyFieldsForClassesVisitor : public ClassVisitor {
 public:
  bool operator()(ObjPtr<Class> c) OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
    StackHandleScope<1> hs(Thread::Current());
    mirror::Class::ComputeName(hs.NewHandle(c));
    return true;
  }
};

void ImageWriter::ComputeLazyFieldsForImageClasses() {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ComputeLazyFieldsForClassesVisitor visitor;
  class_linker->VisitClassesWithoutClassesLock(&visitor);
}

static bool IsBootClassLoaderClass(ObjPtr<mirror::Class> klass)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  return klass->GetClassLoader() == nullptr;
}

bool ImageWriter::IsBootClassLoaderNonImageClass(mirror::Class* klass) {
  return IsBootClassLoaderClass(klass) && !IsInBootImage(klass);
}

bool ImageWriter::PruneAppImageClass(ObjPtr<mirror::Class> klass) {
  bool early_exit = false;
  std::unordered_set<mirror::Class*> visited;
  return PruneAppImageClassInternal(klass, &early_exit, &visited);
}

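// Returns true if `klass` should be pruned from the app image. Results are memoized in
// prune_class_memo_; `visited` breaks cycles, and results reached through a cycle are not
// memoized (see the early-exit handling at the end).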
bool ImageWriter::PruneAppImageClassInternal(
    ObjPtr<mirror::Class> klass,
    bool* early_exit,
    std::unordered_set<mirror::Class*>* visited) {
  DCHECK(early_exit != nullptr);
  DCHECK(visited != nullptr);
  DCHECK(compile_app_image_);
  if (klass == nullptr || IsInBootImage(klass.Ptr())) {
    return false;
  }
  auto found = prune_class_memo_.find(klass.Ptr());
  if (found != prune_class_memo_.end()) {
    // Already computed, return the found value.
    return found->second;
  }
  // Circular dependency, return false but do not store the result in the memoization table.
  if (visited->find(klass.Ptr()) != visited->end()) {
    *early_exit = true;
    return false;
  }
  visited->emplace(klass.Ptr());
  bool result = IsBootClassLoaderClass(klass);
  std::string temp;
  // Prune if not an image class. This handles any broken sets of image classes such as having a
  // class in the set but not its superclass.
  result = result || !compiler_driver_.IsImageClass(klass->GetDescriptor(&temp));
  bool my_early_exit = false;  // Only for ourselves, ignore caller.
  // Remove classes that failed to verify since we don't want to have java.lang.VerifyError in the
  // app image.
  if (klass->IsErroneous()) {
    result = true;
  } else {
    ObjPtr<mirror::ClassExt> ext(klass->GetExtData());
    CHECK(ext.IsNull() || ext->GetVerifyError() == nullptr) << klass->PrettyClass();
  }
  if (!result) {
    // Check interfaces since these won't be visited through VisitReferences.
    mirror::IfTable* if_table = klass->GetIfTable();
    for (size_t i = 0, num_interfaces = klass->GetIfTableCount(); i < num_interfaces; ++i) {
      result = result || PruneAppImageClassInternal(if_table->GetInterface(i),
                                                    &my_early_exit,
                                                    visited);
    }
  }
  if (klass->IsObjectArrayClass()) {
    result = result || PruneAppImageClassInternal(klass->GetComponentType(),
                                                  &my_early_exit,
                                                  visited);
  }
  // Check static fields and their classes.
  if (klass->IsResolved() && klass->NumReferenceStaticFields() != 0) {
    size_t num_static_fields = klass->NumReferenceStaticFields();
    // GC can presumably happen while we are cross-compiling; doing the pointer-size logic here
    // should not cause performance problems.
    MemberOffset field_offset = klass->GetFirstReferenceStaticFieldOffset(
        Runtime::Current()->GetClassLinker()->GetImagePointerSize());
    for (size_t i = 0u; i < num_static_fields; ++i) {
      mirror::Object* ref = klass->GetFieldObject<mirror::Object>(field_offset);
      if (ref != nullptr) {
        if (ref->IsClass()) {
          result = result || PruneAppImageClassInternal(ref->AsClass(),
                                                        &my_early_exit,
                                                        visited);
        } else {
          result = result || PruneAppImageClassInternal(ref->GetClass(),
                                                        &my_early_exit,
                                                        visited);
        }
      }
      field_offset = MemberOffset(field_offset.Uint32Value() +
                                  sizeof(mirror::HeapReference<mirror::Object>));
    }
  }
  result = result || PruneAppImageClassInternal(klass->GetSuperClass(),
                                                &my_early_exit,
                                                visited);
  // Remove the class if its dex file is not in the set of dex files. This happens for classes
  // that come from a uses-library when there is no profile. b/30688277
  mirror::DexCache* dex_cache = klass->GetDexCache();
  if (dex_cache != nullptr) {
    result = result ||
        dex_file_oat_index_map_.find(dex_cache->GetDexFile()) == dex_file_oat_index_map_.end();
  }
  // Erase the element we stored earlier since we are exiting the function.
  auto it = visited->find(klass.Ptr());
  DCHECK(it != visited->end());
  visited->erase(it);
  // Only store the result if it is true or none of the calls early exited due to circular
  // dependencies. If visited is empty then we are the root caller; in this case the cycle was in
  // a child call and we can remember the result.
  if (result == true || !my_early_exit || visited->empty()) {
    prune_class_memo_[klass.Ptr()] = result;
  }
  *early_exit |= my_early_exit;
  return result;
}

bool ImageWriter::KeepClass(ObjPtr<mirror::Class> klass) {
  if (klass == nullptr) {
    return false;
  }
  if (compile_app_image_ && Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass)) {
    // Already in boot image, return true.
    return true;
  }
  std::string temp;
  if (!compiler_driver_.IsImageClass(klass->GetDescriptor(&temp))) {
    return false;
  }
  if (compile_app_image_) {
    // For app images, we need to prune boot loader classes that are not in the boot image since
    // these may have already been loaded when the app image is loaded.
    // Keep classes in the boot image space since we don't want to re-resolve these.
    return !PruneAppImageClass(klass);
  }
  return true;
}

class ImageWriter::PruneClassesVisitor : public ClassVisitor {
 public:
  PruneClassesVisitor(ImageWriter* image_writer, ObjPtr<mirror::ClassLoader> class_loader)
      : image_writer_(image_writer),
        class_loader_(class_loader),
        classes_to_prune_(),
        defined_class_count_(0u) { }

  bool operator()(ObjPtr<mirror::Class> klass) OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!image_writer_->KeepClass(klass.Ptr())) {
      classes_to_prune_.insert(klass.Ptr());
      if (klass->GetClassLoader() == class_loader_) {
        ++defined_class_count_;
      }
    }
    return true;
  }

  size_t Prune() REQUIRES_SHARED(Locks::mutator_lock_) {
    ClassTable* class_table =
        Runtime::Current()->GetClassLinker()->ClassTableForClassLoader(class_loader_);
    for (mirror::Class* klass : classes_to_prune_) {
      std::string storage;
      const char* descriptor = klass->GetDescriptor(&storage);
      bool result = class_table->Remove(descriptor);
      DCHECK(result);
      DCHECK(!class_table->Remove(descriptor)) << descriptor;
    }
    return defined_class_count_;
  }

 private:
  ImageWriter* const image_writer_;
  const ObjPtr<mirror::ClassLoader> class_loader_;
  std::unordered_set<mirror::Class*> classes_to_prune_;
  size_t defined_class_count_;
};

class ImageWriter::PruneClassLoaderClassesVisitor : public ClassLoaderVisitor {
 public:
  explicit PruneClassLoaderClassesVisitor(ImageWriter* image_writer)
      : image_writer_(image_writer), removed_class_count_(0) {}

  virtual void Visit(ObjPtr<mirror::ClassLoader> class_loader) OVERRIDE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    PruneClassesVisitor classes_visitor(image_writer_, class_loader);
    ClassTable* class_table =
        Runtime::Current()->GetClassLinker()->ClassTableForClassLoader(class_loader);
    class_table->Visit(classes_visitor);
    removed_class_count_ += classes_visitor.Prune();

    // Record the app image class loader. The fake boot class loader should not get registered,
    // so we should end up with only one class loader for an app and none for the boot image.
    if (class_loader != nullptr && class_table != nullptr) {
      DCHECK(class_loader_ == nullptr);
      class_loader_ = class_loader;
    }
  }

  size_t GetRemovedClassCount() const {
    return removed_class_count_;
  }

  ObjPtr<mirror::ClassLoader> GetClassLoader() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return class_loader_;
  }

 private:
  ImageWriter* const image_writer_;
  size_t removed_class_count_;
  ObjPtr<mirror::ClassLoader> class_loader_;
};

void ImageWriter::VisitClassLoaders(ClassLoaderVisitor* visitor) {
  WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
  visitor->Visit(nullptr);  // Visit boot class loader.
  Runtime::Current()->GetClassLinker()->VisitClassLoaders(visitor);
}

void ImageWriter::PruneAndPreloadDexCache(ObjPtr<mirror::DexCache> dex_cache,
                                          ObjPtr<mirror::ClassLoader> class_loader) {
  // To ensure deterministic contents of the hash-based arrays, each slot shall contain
  // the candidate with the lowest index. As we're processing entries in increasing index
  // order, this means trying to look up the entry for the current index if the slot is
  // empty or if it contains a higher index.

  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  ArtMethod* resolution_method = runtime->GetResolutionMethod();
  const DexFile& dex_file = *dex_cache->GetDexFile();
  // Prune methods.
  ArtMethod** resolved_methods = dex_cache->GetResolvedMethods();
  for (size_t i = 0, num = dex_cache->NumResolvedMethods(); i != num; ++i) {
    ArtMethod* method =
        mirror::DexCache::GetElementPtrSize(resolved_methods, i, target_ptr_size_);
    DCHECK(method != nullptr) << "Expected resolution method instead of null method";
    mirror::Class* declaring_class = method->GetDeclaringClass();
    // Copied methods may be held live by a class which was not an image class but has a
    // declaring class which is an image class. Set it to the resolution method to be safe and
    // prevent dangling pointers.
    if (method->IsCopied() || !KeepClass(declaring_class)) {
      mirror::DexCache::SetElementPtrSize(resolved_methods,
                                          i,
                                          resolution_method,
                                          target_ptr_size_);
    } else if (kIsDebugBuild) {
      // Check that the class is still in the classes table.
      ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
      CHECK(class_linker->ClassInClassTable(declaring_class)) << "Class "
          << Class::PrettyClass(declaring_class) << " not in class linker table";
    }
  }
  // Prune fields and make the contents of the field array deterministic.
  mirror::FieldDexCacheType* resolved_fields = dex_cache->GetResolvedFields();
  dex::TypeIndex last_class_idx;  // Initialized to invalid index.
  ObjPtr<mirror::Class> last_class = nullptr;
  for (size_t i = 0, end = dex_file.NumFieldIds(); i < end; ++i) {
    uint32_t slot_idx = dex_cache->FieldSlotIndex(i);
    auto pair = mirror::DexCache::GetNativePairPtrSize(resolved_fields, slot_idx, target_ptr_size_);
    uint32_t stored_index = pair.index;
    ArtField* field = pair.object;
    if (field != nullptr && i > stored_index) {
      continue;  // Already checked.
    }
    // Check if the referenced class is in the image. Note that we want to check the referenced
    // class rather than the declaring class to preserve the semantics, i.e. using a FieldId
    // results in resolving the referenced class and that can for example throw OOME.
    const DexFile::FieldId& field_id = dex_file.GetFieldId(i);
    if (field_id.class_idx_ != last_class_idx) {
      last_class_idx = field_id.class_idx_;
      last_class = class_linker->LookupResolvedType(
          dex_file, last_class_idx, dex_cache, class_loader);
      if (last_class != nullptr && !KeepClass(last_class)) {
        last_class = nullptr;
      }
    }
    if (field == nullptr || i < stored_index) {
      if (last_class != nullptr) {
        const char* name = dex_file.StringDataByIdx(field_id.name_idx_);
        const char* type = dex_file.StringByTypeIdx(field_id.type_idx_);
        field = mirror::Class::FindField(Thread::Current(), last_class, name, type);
        if (field != nullptr) {
          // If the referenced class is in the image, the defining class must also be there.
          DCHECK(KeepClass(field->GetDeclaringClass()));
          dex_cache->SetResolvedField(i, field, target_ptr_size_);
        }
      }
    } else {
      DCHECK_EQ(i, stored_index);
      if (last_class == nullptr) {
        dex_cache->ClearResolvedField(stored_index, target_ptr_size_);
      }
    }
  }
  // Prune types and make the contents of the type array deterministic.
  // This is done after fields and methods as their lookup can touch the types array.
  for (size_t i = 0, end = dex_cache->GetDexFile()->NumTypeIds(); i < end; ++i) {
    dex::TypeIndex type_idx(i);
    uint32_t slot_idx = dex_cache->TypeSlotIndex(type_idx);
    mirror::TypeDexCachePair pair =
        dex_cache->GetResolvedTypes()[slot_idx].load(std::memory_order_relaxed);
    uint32_t stored_index = pair.index;
    ObjPtr<mirror::Class> klass = pair.object.Read();
    if (klass == nullptr || i < stored_index) {
      klass = class_linker->LookupResolvedType(dex_file, type_idx, dex_cache, class_loader);
      if (klass != nullptr) {
        DCHECK_EQ(dex_cache->GetResolvedType(type_idx), klass);
        stored_index = i;  // For correct clearing below if not keeping the `klass`.
      }
    } else if (i == stored_index && !KeepClass(klass)) {
      dex_cache->ClearResolvedType(dex::TypeIndex(stored_index));
    }
  }
  // Strings do not need pruning, but the contents of the string array must be deterministic.
  for (size_t i = 0, end = dex_cache->GetDexFile()->NumStringIds(); i < end; ++i) {
    dex::StringIndex string_idx(i);
    uint32_t slot_idx = dex_cache->StringSlotIndex(string_idx);
    mirror::StringDexCachePair pair =
        dex_cache->GetStrings()[slot_idx].load(std::memory_order_relaxed);
    uint32_t stored_index = pair.index;
    ObjPtr<mirror::String> string = pair.object.Read();
    if (string == nullptr || i < stored_index) {
      string = class_linker->LookupString(dex_file, string_idx, dex_cache);
      DCHECK(string == nullptr || dex_cache->GetResolvedString(string_idx) == string);
    }
  }
}

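// Remove classes that should not be kept in the image from all class tables and clear the
// references to them from the dex caches.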
void ImageWriter::PruneNonImageClasses() {
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  Thread* self = Thread::Current();
  ScopedAssertNoThreadSuspension sa(__FUNCTION__);

  // Clear class table strong roots so that dex caches can get pruned. We require pruning the class
  // path dex caches.
  class_linker->ClearClassTableStrongRoots();

  // Remove the undesired classes from the class roots.
  ObjPtr<mirror::ClassLoader> class_loader;
  {
    PruneClassLoaderClassesVisitor class_loader_visitor(this);
    VisitClassLoaders(&class_loader_visitor);
    VLOG(compiler) << "Pruned " << class_loader_visitor.GetRemovedClassCount() << " classes";
    class_loader = class_loader_visitor.GetClassLoader();
    DCHECK_EQ(class_loader != nullptr, compile_app_image_);
  }

  // Clear references to removed classes from the DexCaches.
  std::vector<ObjPtr<mirror::DexCache>> dex_caches;
  {
    ReaderMutexLock mu2(self, *Locks::dex_lock_);
    dex_caches.reserve(class_linker->GetDexCachesData().size());
    for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
      if (self->IsJWeakCleared(data.weak_root)) {
        continue;
      }
      dex_caches.push_back(self->DecodeJObject(data.weak_root)->AsDexCache());
    }
  }
  for (ObjPtr<mirror::DexCache> dex_cache : dex_caches) {
    PruneAndPreloadDexCache(dex_cache, class_loader);
  }

  // Drop the array class cache in the ClassLinker, as these are roots holding those classes live.
  class_linker->DropFindArrayClassCache();

  // Clear to save RAM.
  prune_class_memo_.clear();
}

void ImageWriter::CheckNonImageClassesRemoved() {
  if (compiler_driver_.GetImageClasses() != nullptr) {
    gc::Heap* heap = Runtime::Current()->GetHeap();
    heap->VisitObjects(CheckNonImageClassesRemovedCallback, this);
  }
}

void ImageWriter::CheckNonImageClassesRemovedCallback(Object* obj, void* arg) {
  ImageWriter* image_writer = reinterpret_cast<ImageWriter*>(arg);
  if (obj->IsClass() && !image_writer->IsInBootImage(obj)) {
    Class* klass = obj->AsClass();
    if (!image_writer->KeepClass(klass)) {
      image_writer->DumpImageClasses();
      std::string temp;
      CHECK(image_writer->KeepClass(klass)) << klass->GetDescriptor(&temp)
                                            << " " << klass->PrettyDescriptor();
    }
  }
}

void ImageWriter::DumpImageClasses() {
  auto image_classes = compiler_driver_.GetImageClasses();
  CHECK(image_classes != nullptr);
  for (const std::string& image_class : *image_classes) {
    LOG(INFO) << " " << image_class;
  }
}

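// Return the interned copy of `string` if it is strongly interned in any image being written,
// or (for app images) in the boot image; return null otherwise.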
mirror::String* ImageWriter::FindInternedString(mirror::String* string) {
  Thread* const self = Thread::Current();
  for (const ImageInfo& image_info : image_infos_) {
    ObjPtr<mirror::String> const found = image_info.intern_table_->LookupStrong(self, string);
    DCHECK(image_info.intern_table_->LookupWeak(self, string) == nullptr)
        << string->ToModifiedUtf8();
    if (found != nullptr) {
      return found.Ptr();
    }
  }
  if (compile_app_image_) {
    Runtime* const runtime = Runtime::Current();
    ObjPtr<mirror::String> found = runtime->GetInternTable()->LookupStrong(self, string);
    // If we found it in the runtime intern table it could either be in the boot image or interned
    // during app image compilation. If it was in the boot image return that, otherwise return null
    // since it belongs to another image space.
    if (found != nullptr && runtime->GetHeap()->ObjectIsInBootImageSpace(found.Ptr())) {
      return found.Ptr();
    }
    DCHECK(runtime->GetInternTable()->LookupWeak(self, string) == nullptr)
        << string->ToModifiedUtf8();
  }
  return nullptr;
}


ObjectArray<Object>* ImageWriter::CreateImageRoots(size_t oat_index) const {
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  Thread* self = Thread::Current();
  StackHandleScope<3> hs(self);
  Handle<Class> object_array_class(hs.NewHandle(
      class_linker->FindSystemClass(self, "[Ljava/lang/Object;")));

  std::unordered_set<const DexFile*> image_dex_files;
  for (auto& pair : dex_file_oat_index_map_) {
    const DexFile* image_dex_file = pair.first;
    size_t image_oat_index = pair.second;
    if (oat_index == image_oat_index) {
      image_dex_files.insert(image_dex_file);
    }
  }

  // Build an Object[] of all the DexCaches used in the source_space_.
  // Since we can't hold the dex lock when allocating the dex_caches ObjectArray, we lock the
  // dex lock twice: first to count the dex caches, then again to copy them. We check that the
  // number of dex caches does not change in between.
  size_t dex_cache_count = 0;
  {
    ReaderMutexLock mu(self, *Locks::dex_lock_);
    // Count number of dex caches not in the boot image.
    for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
      ObjPtr<mirror::DexCache> dex_cache =
          ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
      if (dex_cache == nullptr) {
        continue;
      }
      const DexFile* dex_file = dex_cache->GetDexFile();
      if (!IsInBootImage(dex_cache.Ptr())) {
        dex_cache_count += image_dex_files.find(dex_file) != image_dex_files.end() ? 1u : 0u;
      }
    }
  }
  Handle<ObjectArray<Object>> dex_caches(
      hs.NewHandle(ObjectArray<Object>::Alloc(self, object_array_class.Get(), dex_cache_count)));
  CHECK(dex_caches != nullptr) << "Failed to allocate a dex cache array.";
  {
    ReaderMutexLock mu(self, *Locks::dex_lock_);
    size_t non_image_dex_caches = 0;
    // Re-count number of non image dex caches.
    for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
      ObjPtr<mirror::DexCache> dex_cache =
          ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
      if (dex_cache == nullptr) {
        continue;
      }
      const DexFile* dex_file = dex_cache->GetDexFile();
      if (!IsInBootImage(dex_cache.Ptr())) {
        non_image_dex_caches += image_dex_files.find(dex_file) != image_dex_files.end() ? 1u : 0u;
      }
    }
    CHECK_EQ(dex_cache_count, non_image_dex_caches)
        << "The number of non-image dex caches changed.";
    size_t i = 0;
    for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
      ObjPtr<mirror::DexCache> dex_cache =
          ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
      if (dex_cache == nullptr) {
        continue;
      }
      const DexFile* dex_file = dex_cache->GetDexFile();
      if (!IsInBootImage(dex_cache.Ptr()) &&
          image_dex_files.find(dex_file) != image_dex_files.end()) {
        dex_caches->Set<false>(i, dex_cache.Ptr());
        ++i;
      }
    }
  }

  // Build an Object[] of the roots needed to restore the runtime.
  int32_t image_roots_size = ImageHeader::NumberOfImageRoots(compile_app_image_);
  auto image_roots(hs.NewHandle(
      ObjectArray<Object>::Alloc(self, object_array_class.Get(), image_roots_size)));
  image_roots->Set<false>(ImageHeader::kDexCaches, dex_caches.Get());
  image_roots->Set<false>(ImageHeader::kClassRoots, class_linker->GetClassRoots());
  // image_roots[ImageHeader::kClassLoader] will be set later for app image.
  static_assert(ImageHeader::kClassLoader + 1u == ImageHeader::kImageRootsMax,
                "Class loader should be the last image root.");
  for (int32_t i = 0; i < ImageHeader::kImageRootsMax - 1; ++i) {
    CHECK(image_roots->Get(i) != nullptr);
  }
  return image_roots.Get();
}

1225 mirror::Object* ImageWriter::TryAssignBinSlot(WorkStack& work_stack,
1226                                               mirror::Object* obj,
1227                                               size_t oat_index) {
1228   if (obj == nullptr || IsInBootImage(obj)) {
1229     // Object is null or already in the image, there is no work to do.
1230     return obj;
1231   }
1232   if (!IsImageBinSlotAssigned(obj)) {
1233     // We want to intern all strings but also assign offsets for the source string. Since the
1234     // pruning phase has already happened, if we intern a string to one already in an image we
1235     // still end up copying the now-unreachable source string.
1236     if (obj->IsString()) {
1237       // Check whether the string is already interned in another image info so that the intern
1238       // tables of two different images never contain the same string.
1239       mirror::String* interned = FindInternedString(obj->AsString());
1240       if (interned == nullptr) {
1241         // Not in another image space, insert to our table.
1242         interned =
1243             GetImageInfo(oat_index).intern_table_->InternStrongImageString(obj->AsString()).Ptr();
1244         DCHECK_EQ(interned, obj);
1245       }
1246     } else if (obj->IsDexCache()) {
1247       oat_index = GetOatIndexForDexCache(obj->AsDexCache());
1248     } else if (obj->IsClass()) {
1249       // Visit and assign offsets for fields and field arrays.
1250       mirror::Class* as_klass = obj->AsClass();
1251       mirror::DexCache* dex_cache = as_klass->GetDexCache();
1252       DCHECK(!as_klass->IsErroneous()) << as_klass->GetStatus();
1253       if (compile_app_image_) {
1254         // Extra sanity check: no boot class loader classes should be left!
1255         CHECK(!IsBootClassLoaderClass(as_klass)) << as_klass->PrettyClass();
1256       }
1257       LengthPrefixedArray<ArtField>* fields[] = {
1258           as_klass->GetSFieldsPtr(), as_klass->GetIFieldsPtr(),
1259       };
1260       // Overwrite the oat index value since the class' dex cache is a more accurate indication
1261       // of where it belongs.
1262       oat_index = GetOatIndexForDexCache(dex_cache);
1263       ImageInfo& image_info = GetImageInfo(oat_index);
1264       if (!compile_app_image_) {
1265         // Note: Avoid locking to prevent lock order violations from root visiting;
1266         // image_info.class_table_ is only accessed from the image writer.
1267         image_info.class_table_->InsertWithoutLocks(as_klass);
1268       }
1269       for (LengthPrefixedArray<ArtField>* cur_fields : fields) {
1270         // Total array length including header.
1271         if (cur_fields != nullptr) {
1272           const size_t header_size = LengthPrefixedArray<ArtField>::ComputeSize(0);
1273           // Forward the entire array at once.
1274           auto it = native_object_relocations_.find(cur_fields);
1275           CHECK(it == native_object_relocations_.end()) << "Field array " << cur_fields
1276                                                   << " already forwarded";
1277           size_t& offset = image_info.bin_slot_sizes_[kBinArtField];
1278           DCHECK(!IsInBootImage(cur_fields));
1279           native_object_relocations_.emplace(
1280               cur_fields,
1281               NativeObjectRelocation {
1282                   oat_index, offset, kNativeObjectRelocationTypeArtFieldArray
1283               });
1284           offset += header_size;
1285           // Forward individual fields so that we can quickly find where they belong.
1286           for (size_t i = 0, count = cur_fields->size(); i < count; ++i) {
1287             // Need to forward arrays separately from fields.
1288             ArtField* field = &cur_fields->At(i);
1289             auto it2 = native_object_relocations_.find(field);
1290             CHECK(it2 == native_object_relocations_.end()) << "Field at index=" << i
1291                 << " already assigned " << field->PrettyField() << " static=" << field->IsStatic();
1292             DCHECK(!IsInBootImage(field));
1293             native_object_relocations_.emplace(
1294                 field,
1295                 NativeObjectRelocation { oat_index, offset, kNativeObjectRelocationTypeArtField });
1296             offset += sizeof(ArtField);
1297           }
1298         }
1299       }
1300       // Visit and assign offsets for methods.
1301       size_t num_methods = as_klass->NumMethods();
1302       if (num_methods != 0) {
1303         bool any_dirty = false;
1304         for (auto& m : as_klass->GetMethods(target_ptr_size_)) {
1305           if (WillMethodBeDirty(&m)) {
1306             any_dirty = true;
1307             break;
1308           }
1309         }
1310         NativeObjectRelocationType type = any_dirty
1311             ? kNativeObjectRelocationTypeArtMethodDirty
1312             : kNativeObjectRelocationTypeArtMethodClean;
1313         Bin bin_type = BinTypeForNativeRelocationType(type);
1314         // Forward the entire array at once, but header first.
1315         const size_t method_alignment = ArtMethod::Alignment(target_ptr_size_);
1316         const size_t method_size = ArtMethod::Size(target_ptr_size_);
1317         const size_t header_size = LengthPrefixedArray<ArtMethod>::ComputeSize(0,
1318                                                                                method_size,
1319                                                                                method_alignment);
1320         LengthPrefixedArray<ArtMethod>* array = as_klass->GetMethodsPtr();
1321         auto it = native_object_relocations_.find(array);
1322         CHECK(it == native_object_relocations_.end())
1323             << "Method array " << array << " already forwarded";
1324         size_t& offset = image_info.bin_slot_sizes_[bin_type];
1325         DCHECK(!IsInBootImage(array));
1326         native_object_relocations_.emplace(array,
1327             NativeObjectRelocation {
1328                 oat_index,
1329                 offset,
1330                 any_dirty ? kNativeObjectRelocationTypeArtMethodArrayDirty
1331                           : kNativeObjectRelocationTypeArtMethodArrayClean });
1332         offset += header_size;
1333         for (auto& m : as_klass->GetMethods(target_ptr_size_)) {
1334           AssignMethodOffset(&m, type, oat_index);
1335         }
1336         (any_dirty ? dirty_methods_ : clean_methods_) += num_methods;
1337       }
1338       // Assign offsets for all runtime methods in the IMT since these may hold conflict tables
1339       // live.
1340       if (as_klass->ShouldHaveImt()) {
1341         ImTable* imt = as_klass->GetImt(target_ptr_size_);
1342         if (TryAssignImTableOffset(imt, oat_index)) {
1343           // Since ImTables can be shared, only do this the first time so that we do not
1344           // double-count IMT method fixups.
1345           for (size_t i = 0; i < ImTable::kSize; ++i) {
1346             ArtMethod* imt_method = imt->Get(i, target_ptr_size_);
1347             DCHECK(imt_method != nullptr);
1348             if (imt_method->IsRuntimeMethod() &&
1349                 !IsInBootImage(imt_method) &&
1350                 !NativeRelocationAssigned(imt_method)) {
1351               AssignMethodOffset(imt_method, kNativeObjectRelocationTypeRuntimeMethod, oat_index);
1352             }
1353           }
1354         }
1355       }
1356     } else if (obj->IsClassLoader()) {
1357       // Register the class loader if it has a class table.
1358       // The fake boot class loader should not get registered and we should end up with only one
1359       // class loader.
1360       mirror::ClassLoader* class_loader = obj->AsClassLoader();
1361       if (class_loader->GetClassTable() != nullptr) {
1362         DCHECK(compile_app_image_);
1363         DCHECK(class_loaders_.empty());
1364         class_loaders_.insert(class_loader);
1365         ImageInfo& image_info = GetImageInfo(oat_index);
1366         // Note: Avoid locking to prevent lock order violations from root visiting;
1367         // image_info.class_table_ table is only accessed from the image writer
1368         // and class_loader->GetClassTable() is iterated but not modified.
1369         image_info.class_table_->CopyWithoutLocks(*class_loader->GetClassTable());
1370       }
1371     }
1372     AssignImageBinSlot(obj, oat_index);
1373     work_stack.emplace(obj, oat_index);
1374   }
1375   if (obj->IsString()) {
1376     // Always return the interned string if there exists one.
1377     mirror::String* interned = FindInternedString(obj->AsString());
1378     if (interned != nullptr) {
1379       return interned;
1380     }
1381   }
1382   return obj;
1383 }
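// Note on TryAssignBinSlot(): callers must use the returned pointer rather than the
// argument. For strings the result may be a different (interned) object, and
// VisitReferencesVisitor below relies on this to rewrite references to the
// canonical string.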
1384 
1385 bool ImageWriter::NativeRelocationAssigned(void* ptr) const {
1386   return native_object_relocations_.find(ptr) != native_object_relocations_.end();
1387 }
1388 
1389 bool ImageWriter::TryAssignImTableOffset(ImTable* imt, size_t oat_index) {
1390   // No offset, or already assigned.
1391   if (imt == nullptr || IsInBootImage(imt) || NativeRelocationAssigned(imt)) {
1392     return false;
1393   }
1394   // Reserve space for the ImTable in the IM table bin and record its relocation.
1395   ImageInfo& image_info = GetImageInfo(oat_index);
1396   const size_t size = ImTable::SizeInBytes(target_ptr_size_);
1397   native_object_relocations_.emplace(
1398       imt,
1399       NativeObjectRelocation {
1400           oat_index,
1401           image_info.bin_slot_sizes_[kBinImTable],
1402           kNativeObjectRelocationTypeIMTable});
1403   image_info.bin_slot_sizes_[kBinImTable] += size;
1404   return true;
1405 }
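// Note on the bin bookkeeping above (and in TryAssignConflictTableOffset() and
// AssignMethodOffset() below): the running bin size doubles as the next free offset
// within that bin, bump-pointer style. A sketch with illustrative names:
//
//   size_t offset = image_info.bin_slot_sizes_[bin];  // Next free slot in the bin.
//   native_object_relocations_.emplace(ptr, NativeObjectRelocation { oat_index, offset, type });
//   image_info.bin_slot_sizes_[bin] += size;          // Bump past the new entry.
//
// The recorded offsets are bin-relative; they become image-relative only when
// CalculateNewObjectOffsets() adds the final bin_slot_offsets_ to each relocation.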
1406 
1407 void ImageWriter::TryAssignConflictTableOffset(ImtConflictTable* table, size_t oat_index) {
1408   // No offset, or already assigned.
1409   if (table == nullptr || NativeRelocationAssigned(table)) {
1410     return;
1411   }
1412   CHECK(!IsInBootImage(table));
1413   // If the method is a conflict method we also want to assign the conflict table offset.
1414   ImageInfo& image_info = GetImageInfo(oat_index);
1415   const size_t size = table->ComputeSize(target_ptr_size_);
1416   native_object_relocations_.emplace(
1417       table,
1418       NativeObjectRelocation {
1419           oat_index,
1420           image_info.bin_slot_sizes_[kBinIMTConflictTable],
1421           kNativeObjectRelocationTypeIMTConflictTable});
1422   image_info.bin_slot_sizes_[kBinIMTConflictTable] += size;
1423 }
1424 
1425 void ImageWriter::AssignMethodOffset(ArtMethod* method,
1426                                      NativeObjectRelocationType type,
1427                                      size_t oat_index) {
1428   DCHECK(!IsInBootImage(method));
1429   CHECK(!NativeRelocationAssigned(method)) << "Method " << method << " already assigned "
1430       << ArtMethod::PrettyMethod(method);
1431   if (method->IsRuntimeMethod()) {
1432     TryAssignConflictTableOffset(method->GetImtConflictTable(target_ptr_size_), oat_index);
1433   }
1434   ImageInfo& image_info = GetImageInfo(oat_index);
1435   size_t& offset = image_info.bin_slot_sizes_[BinTypeForNativeRelocationType(type)];
1436   native_object_relocations_.emplace(method, NativeObjectRelocation { oat_index, offset, type });
1437   offset += ArtMethod::Size(target_ptr_size_);
1438 }
1439 
1440 void ImageWriter::EnsureBinSlotAssignedCallback(mirror::Object* obj, void* arg) {
1441   ImageWriter* writer = reinterpret_cast<ImageWriter*>(arg);
1442   DCHECK(writer != nullptr);
1443   if (!Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(obj)) {
1444     CHECK(writer->IsImageBinSlotAssigned(obj)) << mirror::Object::PrettyTypeOf(obj) << " " << obj;
1445   }
1446 }
1447 
1448 void ImageWriter::DeflateMonitorCallback(mirror::Object* obj, void* arg ATTRIBUTE_UNUSED) {
1449   Monitor::Deflate(Thread::Current(), obj);
1450 }
1451 
1452 void ImageWriter::UnbinObjectsIntoOffsetCallback(mirror::Object* obj, void* arg) {
1453   ImageWriter* writer = reinterpret_cast<ImageWriter*>(arg);
1454   DCHECK(writer != nullptr);
1455   if (!writer->IsInBootImage(obj)) {
1456     writer->UnbinObjectsIntoOffset(obj);
1457   }
1458 }
1459 
1460 void ImageWriter::UnbinObjectsIntoOffset(mirror::Object* obj) {
1461   DCHECK(!IsInBootImage(obj));
1462   CHECK(obj != nullptr);
1463 
1464   // We know the bin slot, and the total bin sizes for all objects by now,
1465   // so calculate the object's final image offset.
1466 
1467   DCHECK(IsImageBinSlotAssigned(obj));
1468   BinSlot bin_slot = GetImageBinSlot(obj);
1469   // Change the lock word from a bin slot into an offset.
1470   AssignImageOffset(obj, bin_slot);
1471 }
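// The lock word is reused as scratch space while the image is written: it first
// holds the BinSlot and is then overwritten with the final image offset here. The
// real lock word (the default, or a saved hash code) is restored for the source
// objects in CopyAndFixupObjects() and written into the copies in
// CopyAndFixupObject().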
1472 
1473 class ImageWriter::VisitReferencesVisitor {
1474  public:
1475   VisitReferencesVisitor(ImageWriter* image_writer, WorkStack* work_stack, size_t oat_index)
1476       : image_writer_(image_writer), work_stack_(work_stack), oat_index_(oat_index) {}
1477 
1478   // Fix up separately since we also need to fix up method entrypoints.
1479   ALWAYS_INLINE void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
1480       REQUIRES_SHARED(Locks::mutator_lock_) {
1481     if (!root->IsNull()) {
1482       VisitRoot(root);
1483     }
1484   }
1485 
1486   ALWAYS_INLINE void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
1487       REQUIRES_SHARED(Locks::mutator_lock_) {
1488     root->Assign(VisitReference(root->AsMirrorPtr()));
1489   }
1490 
1491   ALWAYS_INLINE void operator() (ObjPtr<mirror::Object> obj,
1492                                  MemberOffset offset,
1493                                  bool is_static ATTRIBUTE_UNUSED) const
1494       REQUIRES_SHARED(Locks::mutator_lock_) {
1495     mirror::Object* ref =
1496         obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset);
1497     obj->SetFieldObject</*kTransactionActive*/false>(offset, VisitReference(ref));
1498   }
1499 
1500   ALWAYS_INLINE void operator() (ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
1501                                  ObjPtr<mirror::Reference> ref) const
1502       REQUIRES_SHARED(Locks::mutator_lock_) {
1503     operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false);
1504   }
1505 
1506  private:
1507   mirror::Object* VisitReference(mirror::Object* ref) const REQUIRES_SHARED(Locks::mutator_lock_) {
1508     return image_writer_->TryAssignBinSlot(*work_stack_, ref, oat_index_);
1509   }
1510 
1511   ImageWriter* const image_writer_;
1512   WorkStack* const work_stack_;
1513   const size_t oat_index_;
1514 };
1515 
1516 class ImageWriter::GetRootsVisitor : public RootVisitor  {
1517  public:
1518   explicit GetRootsVisitor(std::vector<mirror::Object*>* roots) : roots_(roots) {}
1519 
1520   void VisitRoots(mirror::Object*** roots,
1521                   size_t count,
1522                   const RootInfo& info ATTRIBUTE_UNUSED) OVERRIDE
1523       REQUIRES_SHARED(Locks::mutator_lock_) {
1524     for (size_t i = 0; i < count; ++i) {
1525       roots_->push_back(*roots[i]);
1526     }
1527   }
1528 
1529   void VisitRoots(mirror::CompressedReference<mirror::Object>** roots,
1530                   size_t count,
1531                   const RootInfo& info ATTRIBUTE_UNUSED) OVERRIDE
1532       REQUIRES_SHARED(Locks::mutator_lock_) {
1533     for (size_t i = 0; i < count; ++i) {
1534       roots_->push_back(roots[i]->AsMirrorPtr());
1535     }
1536   }
1537 
1538  private:
1539   std::vector<mirror::Object*>* const roots_;
1540 };
1541 
1542 void ImageWriter::ProcessWorkStack(WorkStack* work_stack) {
1543   while (!work_stack->empty()) {
1544     std::pair<mirror::Object*, size_t> pair(work_stack->top());
1545     work_stack->pop();
1546     VisitReferencesVisitor visitor(this, work_stack, /*oat_index*/ pair.second);
1547     // Walk references and assign bin slots for them.
1548     pair.first->VisitReferences</*kVisitNativeRoots*/true, kVerifyNone, kWithoutReadBarrier>(
1549         visitor,
1550         visitor);
1551   }
1552 }
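// ProcessWorkStack() is the fixpoint of the traversal: TryAssignBinSlot() pushes an
// object the first time it receives a bin slot, and popping it here visits its
// references, which may push further objects. When the stack drains, every object
// reachable from the pushed roots has a bin slot.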
1553 
1554 void ImageWriter::CalculateNewObjectOffsets() {
1555   Thread* const self = Thread::Current();
1556   VariableSizedHandleScope handles(self);
1557   std::vector<Handle<ObjectArray<Object>>> image_roots;
1558   for (size_t i = 0, size = oat_filenames_.size(); i != size; ++i) {
1559     image_roots.push_back(handles.NewHandle(CreateImageRoots(i)));
1560   }
1561 
1562   Runtime* const runtime = Runtime::Current();
1563   gc::Heap* const heap = runtime->GetHeap();
1564 
1565   // Leave space for the header, but do not write it yet; we need to
1566   // know where image_roots is going to end up.
1567   image_objects_offset_begin_ = RoundUp(sizeof(ImageHeader), kObjectAlignment);  // 64-bit alignment.
1568 
1569   const size_t method_alignment = ArtMethod::Alignment(target_ptr_size_);
1570   // Write the image runtime methods.
1571   image_methods_[ImageHeader::kResolutionMethod] = runtime->GetResolutionMethod();
1572   image_methods_[ImageHeader::kImtConflictMethod] = runtime->GetImtConflictMethod();
1573   image_methods_[ImageHeader::kImtUnimplementedMethod] = runtime->GetImtUnimplementedMethod();
1574   image_methods_[ImageHeader::kSaveAllCalleeSavesMethod] =
1575       runtime->GetCalleeSaveMethod(Runtime::kSaveAllCalleeSaves);
1576   image_methods_[ImageHeader::kSaveRefsOnlyMethod] =
1577       runtime->GetCalleeSaveMethod(Runtime::kSaveRefsOnly);
1578   image_methods_[ImageHeader::kSaveRefsAndArgsMethod] =
1579       runtime->GetCalleeSaveMethod(Runtime::kSaveRefsAndArgs);
1580   image_methods_[ImageHeader::kSaveEverythingMethod] =
1581       runtime->GetCalleeSaveMethod(Runtime::kSaveEverything);
1582   // Visit image methods first to have the main runtime methods in the first image.
1583   for (auto* m : image_methods_) {
1584     CHECK(m != nullptr);
1585     CHECK(m->IsRuntimeMethod());
1586     DCHECK_EQ(compile_app_image_, IsInBootImage(m)) << "Trampolines should be in boot image";
1587     if (!IsInBootImage(m)) {
1588       AssignMethodOffset(m, kNativeObjectRelocationTypeRuntimeMethod, GetDefaultOatIndex());
1589     }
1590   }
1591 
1592   // Deflate monitors before we visit roots since deflating acquires the monitor lock. Acquiring
1593   // this lock while holding other locks may cause lock order violations.
1594   heap->VisitObjects(DeflateMonitorCallback, this);
1595 
1596   // Work list of <object, oat_index> for objects. Everything on the stack must already be
1597   // assigned a bin slot.
1598   WorkStack work_stack;
1599 
1600   // Special case interned strings to put them in the image they are likely to be resolved from.
1601   for (const DexFile* dex_file : compiler_driver_.GetDexFilesForOatFile()) {
1602     auto it = dex_file_oat_index_map_.find(dex_file);
1603     DCHECK(it != dex_file_oat_index_map_.end()) << dex_file->GetLocation();
1604     const size_t oat_index = it->second;
1605     InternTable* const intern_table = runtime->GetInternTable();
1606     for (size_t i = 0, count = dex_file->NumStringIds(); i < count; ++i) {
1607       uint32_t utf16_length;
1608       const char* utf8_data = dex_file->StringDataAndUtf16LengthByIdx(dex::StringIndex(i),
1609                                                                       &utf16_length);
1610       mirror::String* string = intern_table->LookupStrong(self, utf16_length, utf8_data).Ptr();
1611       TryAssignBinSlot(work_stack, string, oat_index);
1612     }
1613   }
1614 
1615   // Get the GC roots and then visit them separately to avoid lock violations since the root visitor
1616   // visits roots while holding various locks.
1617   {
1618     std::vector<mirror::Object*> roots;
1619     GetRootsVisitor root_visitor(&roots);
1620     runtime->VisitRoots(&root_visitor);
1621     for (mirror::Object* obj : roots) {
1622       TryAssignBinSlot(work_stack, obj, GetDefaultOatIndex());
1623     }
1624   }
1625   ProcessWorkStack(&work_stack);
1626 
1627   // For app images, there may be objects that are only held live by the boot image. One
1628   // example is finalizer references. Forward these objects so that EnsureBinSlotAssignedCallback
1629   // does not fail any checks. TODO: We should probably avoid copying these objects.
1630   if (compile_app_image_) {
1631     for (gc::space::ImageSpace* space : heap->GetBootImageSpaces()) {
1632       DCHECK(space->IsImageSpace());
1633       gc::accounting::ContinuousSpaceBitmap* live_bitmap = space->GetLiveBitmap();
1634       live_bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(space->Begin()),
1635                                     reinterpret_cast<uintptr_t>(space->Limit()),
1636                                     [this, &work_stack](mirror::Object* obj)
1637           REQUIRES_SHARED(Locks::mutator_lock_) {
1638         VisitReferencesVisitor visitor(this, &work_stack, GetDefaultOatIndex());
1639         // Visit all references and try to assign bin slots for them (calls TryAssignBinSlot).
1640         obj->VisitReferences</*kVisitNativeRoots*/true, kVerifyNone, kWithoutReadBarrier>(
1641             visitor,
1642             visitor);
1643       });
1644     }
1645     // Process the work stack in case anything was added by TryAssignBinSlot.
1646     ProcessWorkStack(&work_stack);
1647 
1648     // Store the class loader in the class roots.
1649     CHECK_EQ(class_loaders_.size(), 1u);
1650     CHECK_EQ(image_roots.size(), 1u);
1651     CHECK(*class_loaders_.begin() != nullptr);
1652     image_roots[0]->Set<false>(ImageHeader::kClassLoader, *class_loaders_.begin());
1653   }
1654 
1655   // Verify that all objects have assigned image bin slots.
1656   heap->VisitObjects(EnsureBinSlotAssignedCallback, this);
1657 
1658   // Calculate size of the dex cache arrays slot and prepare offsets.
1659   PrepareDexCacheArraySlots();
1660 
1661   // Calculate the sizes of the intern tables, class tables, and fixup tables.
1662   for (ImageInfo& image_info : image_infos_) {
1663     // Calculate how big the intern table will be after being serialized.
1664     InternTable* const intern_table = image_info.intern_table_.get();
1665     CHECK_EQ(intern_table->WeakSize(), 0u) << " should have strong interned all the strings";
1666     if (intern_table->StrongSize() != 0u) {
1667       image_info.intern_table_bytes_ = intern_table->WriteToMemory(nullptr);
1668     }
1669 
1670     // Calculate the size of the class table.
1671     ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
1672     DCHECK_EQ(image_info.class_table_->NumReferencedZygoteClasses(), 0u);
1673     if (image_info.class_table_->NumReferencedNonZygoteClasses() != 0u) {
1674       image_info.class_table_bytes_ += image_info.class_table_->WriteToMemory(nullptr);
1675     }
1676   }
1677 
1678   // Calculate bin slot offsets.
1679   for (ImageInfo& image_info : image_infos_) {
1680     size_t bin_offset = image_objects_offset_begin_;
1681     for (size_t i = 0; i != kBinSize; ++i) {
1682       switch (i) {
1683         case kBinArtMethodClean:
1684         case kBinArtMethodDirty: {
1685           bin_offset = RoundUp(bin_offset, method_alignment);
1686           break;
1687         }
1688         case kBinDexCacheArray:
1689           bin_offset = RoundUp(bin_offset, DexCacheArraysLayout::Alignment(target_ptr_size_));
1690           break;
1691         case kBinImTable:
1692         case kBinIMTConflictTable: {
1693           bin_offset = RoundUp(bin_offset, static_cast<size_t>(target_ptr_size_));
1694           break;
1695         }
1696         default: {
1697           // Normal alignment.
1698         }
1699       }
1700       image_info.bin_slot_offsets_[i] = bin_offset;
1701       bin_offset += image_info.bin_slot_sizes_[i];
1702     }
1703     // NOTE: There may be additional padding between the bin slots and the intern table.
1704     DCHECK_EQ(image_info.image_end_,
1705               GetBinSizeSum(image_info, kBinMirrorCount) + image_objects_offset_begin_);
1706   }
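  // Worked example of the loop above (illustrative numbers only): with
  // image_objects_offset_begin_ = 4096, a preceding bin of size 100, and
  // method_alignment = 8, the method bin would start at RoundUp(4196, 8) = 4200,
  // and the bin after it at 4200 + bin_slot_sizes_[kBinArtMethodClean].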
1707 
1708   // Calculate image offsets.
1709   size_t image_offset = 0;
1710   for (ImageInfo& image_info : image_infos_) {
1711     image_info.image_begin_ = global_image_begin_ + image_offset;
1712     image_info.image_offset_ = image_offset;
1713     ImageSection unused_sections[ImageHeader::kSectionCount];
1714     image_info.image_size_ = RoundUp(image_info.CreateImageSections(unused_sections), kPageSize);
1715     // There should be no gaps until the next image.
1716     image_offset += image_info.image_size_;
1717   }
1718 
1719   // Transform each object's bin slot into an offset which will be used to do the final copy.
1720   heap->VisitObjects(UnbinObjectsIntoOffsetCallback, this);
1721 
1722   size_t i = 0;
1723   for (ImageInfo& image_info : image_infos_) {
1724     image_info.image_roots_address_ = PointerToLowMemUInt32(GetImageAddress(image_roots[i].Get()));
1725     i++;
1726   }
1727 
1728   // Update the native relocations by adding their bin sums.
1729   for (auto& pair : native_object_relocations_) {
1730     NativeObjectRelocation& relocation = pair.second;
1731     Bin bin_type = BinTypeForNativeRelocationType(relocation.type);
1732     ImageInfo& image_info = GetImageInfo(relocation.oat_index);
1733     relocation.offset += image_info.bin_slot_offsets_[bin_type];
1734   }
1735 }
1736 
1737 size_t ImageWriter::ImageInfo::CreateImageSections(ImageSection* out_sections) const {
1738   DCHECK(out_sections != nullptr);
1739 
1740   // Do not round up any sections here that are represented by the bins since it will break
1741   // offsets.
1742 
1743   // Objects section
1744   ImageSection* objects_section = &out_sections[ImageHeader::kSectionObjects];
1745   *objects_section = ImageSection(0u, image_end_);
1746 
1747   // Add field section.
1748   ImageSection* field_section = &out_sections[ImageHeader::kSectionArtFields];
1749   *field_section = ImageSection(bin_slot_offsets_[kBinArtField], bin_slot_sizes_[kBinArtField]);
1750   CHECK_EQ(bin_slot_offsets_[kBinArtField], field_section->Offset());
1751 
1752   // Add method section.
1753   ImageSection* methods_section = &out_sections[ImageHeader::kSectionArtMethods];
1754   *methods_section = ImageSection(
1755       bin_slot_offsets_[kBinArtMethodClean],
1756       bin_slot_sizes_[kBinArtMethodClean] + bin_slot_sizes_[kBinArtMethodDirty]);
1757 
1758   // IMT section.
1759   ImageSection* imt_section = &out_sections[ImageHeader::kSectionImTables];
1760   *imt_section = ImageSection(bin_slot_offsets_[kBinImTable], bin_slot_sizes_[kBinImTable]);
1761 
1762   // Conflict tables section.
1763   ImageSection* imt_conflict_tables_section = &out_sections[ImageHeader::kSectionIMTConflictTables];
1764   *imt_conflict_tables_section = ImageSection(bin_slot_offsets_[kBinIMTConflictTable],
1765                                               bin_slot_sizes_[kBinIMTConflictTable]);
1766 
1767   // Runtime methods section.
1768   ImageSection* runtime_methods_section = &out_sections[ImageHeader::kSectionRuntimeMethods];
1769   *runtime_methods_section = ImageSection(bin_slot_offsets_[kBinRuntimeMethod],
1770                                           bin_slot_sizes_[kBinRuntimeMethod]);
1771 
1772   // Add dex cache arrays section.
1773   ImageSection* dex_cache_arrays_section = &out_sections[ImageHeader::kSectionDexCacheArrays];
1774   *dex_cache_arrays_section = ImageSection(bin_slot_offsets_[kBinDexCacheArray],
1775                                            bin_slot_sizes_[kBinDexCacheArray]);
1776   // Round up to the alignment the string table expects. See HashSet::WriteToMemory.
1777   size_t cur_pos = RoundUp(dex_cache_arrays_section->End(), sizeof(uint64_t));
1778   // Calculate the size of the interned strings.
1779   ImageSection* interned_strings_section = &out_sections[ImageHeader::kSectionInternedStrings];
1780   *interned_strings_section = ImageSection(cur_pos, intern_table_bytes_);
1781   cur_pos = interned_strings_section->End();
1782   // Round up to the alignment the class table expects. See HashSet::WriteToMemory.
1783   cur_pos = RoundUp(cur_pos, sizeof(uint64_t));
1784   // Calculate the size of the class table section.
1785   ImageSection* class_table_section = &out_sections[ImageHeader::kSectionClassTable];
1786   *class_table_section = ImageSection(cur_pos, class_table_bytes_);
1787   cur_pos = class_table_section->End();
1788   // Image end goes right before the start of the image bitmap.
1789   return cur_pos;
1790 }
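// The layout is contiguous: the mirror object bins come first (ending at
// image_end_), followed by the native bins (ArtFields, ArtMethods, IM tables,
// IMT conflict tables, runtime methods, dex cache arrays, in bin order), then the
// 8-byte-aligned intern table and class table. The returned value is the end of the
// class table; CreateHeader() places the image bitmap on the next page boundary
// after it.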
1791 
1792 void ImageWriter::CreateHeader(size_t oat_index) {
1793   ImageInfo& image_info = GetImageInfo(oat_index);
1794   const uint8_t* oat_file_begin = image_info.oat_file_begin_;
1795   const uint8_t* oat_file_end = oat_file_begin + image_info.oat_loaded_size_;
1796   const uint8_t* oat_data_end = image_info.oat_data_begin_ + image_info.oat_size_;
1797 
1798   // Create the image sections.
1799   ImageSection sections[ImageHeader::kSectionCount];
1800   const size_t image_end = image_info.CreateImageSections(sections);
1801 
1802   // Finally bitmap section.
1803   const size_t bitmap_bytes = image_info.image_bitmap_->Size();
1804   auto* bitmap_section = &sections[ImageHeader::kSectionImageBitmap];
1805   *bitmap_section = ImageSection(RoundUp(image_end, kPageSize), RoundUp(bitmap_bytes, kPageSize));
1806   if (VLOG_IS_ON(compiler)) {
1807     LOG(INFO) << "Creating header for " << oat_filenames_[oat_index];
1808     size_t idx = 0;
1809     for (const ImageSection& section : sections) {
1810       LOG(INFO) << static_cast<ImageHeader::ImageSections>(idx) << " " << section;
1811       ++idx;
1812     }
1813     LOG(INFO) << "Methods: clean=" << clean_methods_ << " dirty=" << dirty_methods_;
1814     LOG(INFO) << "Image roots address=" << std::hex << image_info.image_roots_address_ << std::dec;
1815     LOG(INFO) << "Image begin=" << std::hex << reinterpret_cast<uintptr_t>(global_image_begin_)
1816               << " Image offset=" << image_info.image_offset_ << std::dec;
1817     LOG(INFO) << "Oat file begin=" << std::hex << reinterpret_cast<uintptr_t>(oat_file_begin)
1818               << " Oat data begin=" << reinterpret_cast<uintptr_t>(image_info.oat_data_begin_)
1819               << " Oat data end=" << reinterpret_cast<uintptr_t>(oat_data_end)
1820               << " Oat file end=" << reinterpret_cast<uintptr_t>(oat_file_end);
1821   }
1822   // Store boot image info for app image so that we can relocate.
1823   uint32_t boot_image_begin = 0;
1824   uint32_t boot_image_end = 0;
1825   uint32_t boot_oat_begin = 0;
1826   uint32_t boot_oat_end = 0;
1827   gc::Heap* const heap = Runtime::Current()->GetHeap();
1828   heap->GetBootImagesSize(&boot_image_begin, &boot_image_end, &boot_oat_begin, &boot_oat_end);
1829 
1830   // Create the header, leave 0 for data size since we will fill this in as we are writing the
1831   // image.
1832   new (image_info.image_->Begin()) ImageHeader(PointerToLowMemUInt32(image_info.image_begin_),
1833                                                image_end,
1834                                                sections,
1835                                                image_info.image_roots_address_,
1836                                                image_info.oat_checksum_,
1837                                                PointerToLowMemUInt32(oat_file_begin),
1838                                                PointerToLowMemUInt32(image_info.oat_data_begin_),
1839                                                PointerToLowMemUInt32(oat_data_end),
1840                                                PointerToLowMemUInt32(oat_file_end),
1841                                                boot_image_begin,
1842                                                boot_image_end - boot_image_begin,
1843                                                boot_oat_begin,
1844                                                boot_oat_end - boot_oat_begin,
1845                                                static_cast<uint32_t>(target_ptr_size_),
1846                                                compile_pic_,
1847                                                /*is_pic*/compile_app_image_,
1848                                                image_storage_mode_,
1849                                                /*data_size*/0u);
1850 }
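// Note the placement-new above: the ImageHeader is constructed directly at the
// start of the image buffer, so its fields are the first bytes of the image file.
// The data size is deliberately left as 0 here and is filled in once the (possibly
// compressed) image data has actually been written.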
1851 
1852 ArtMethod* ImageWriter::GetImageMethodAddress(ArtMethod* method) {
1853   auto it = native_object_relocations_.find(method);
1854   CHECK(it != native_object_relocations_.end()) << ArtMethod::PrettyMethod(method) << " @ "
1855                                                 << method;
1856   size_t oat_index = GetOatIndex(method->GetDexCache());
1857   ImageInfo& image_info = GetImageInfo(oat_index);
1858   CHECK_GE(it->second.offset, image_info.image_end_) << "ArtMethods should be after Objects";
1859   return reinterpret_cast<ArtMethod*>(image_info.image_begin_ + it->second.offset);
1860 }
1861 
1862 class ImageWriter::FixupRootVisitor : public RootVisitor {
1863  public:
1864   explicit FixupRootVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {
1865   }
1866 
1867   void VisitRoots(mirror::Object*** roots ATTRIBUTE_UNUSED,
1868                   size_t count ATTRIBUTE_UNUSED,
1869                   const RootInfo& info ATTRIBUTE_UNUSED)
1870       OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
1871     LOG(FATAL) << "Unsupported";
1872   }
1873 
1874   void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
1875                   const RootInfo& info ATTRIBUTE_UNUSED)
1876       OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
1877     for (size_t i = 0; i < count; ++i) {
1878       image_writer_->CopyReference(roots[i], roots[i]->AsMirrorPtr());
1879     }
1880   }
1881 
1882  private:
1883   ImageWriter* const image_writer_;
1884 };
1885 
1886 void ImageWriter::CopyAndFixupImTable(ImTable* orig, ImTable* copy) {
1887   for (size_t i = 0; i < ImTable::kSize; ++i) {
1888     ArtMethod* method = orig->Get(i, target_ptr_size_);
1889     void** address = reinterpret_cast<void**>(copy->AddressOfElement(i, target_ptr_size_));
1890     CopyAndFixupPointer(address, method);
1891     DCHECK_EQ(copy->Get(i, target_ptr_size_), NativeLocationInImage(method));
1892   }
1893 }
1894 
1895 void ImageWriter::CopyAndFixupImtConflictTable(ImtConflictTable* orig, ImtConflictTable* copy) {
1896   const size_t count = orig->NumEntries(target_ptr_size_);
1897   for (size_t i = 0; i < count; ++i) {
1898     ArtMethod* interface_method = orig->GetInterfaceMethod(i, target_ptr_size_);
1899     ArtMethod* implementation_method = orig->GetImplementationMethod(i, target_ptr_size_);
1900     CopyAndFixupPointer(copy->AddressOfInterfaceMethod(i, target_ptr_size_), interface_method);
1901     CopyAndFixupPointer(copy->AddressOfImplementationMethod(i, target_ptr_size_),
1902                         implementation_method);
1903     DCHECK_EQ(copy->GetInterfaceMethod(i, target_ptr_size_),
1904               NativeLocationInImage(interface_method));
1905     DCHECK_EQ(copy->GetImplementationMethod(i, target_ptr_size_),
1906               NativeLocationInImage(implementation_method));
1907   }
1908 }
1909 
1910 void ImageWriter::CopyAndFixupNativeData(size_t oat_index) {
1911   const ImageInfo& image_info = GetImageInfo(oat_index);
1912   // Copy ArtFields and methods to their locations and update the array for convenience.
1913   for (auto& pair : native_object_relocations_) {
1914     NativeObjectRelocation& relocation = pair.second;
1915     // Only work with fields and methods that are in the current oat file.
1916     if (relocation.oat_index != oat_index) {
1917       continue;
1918     }
1919     auto* dest = image_info.image_->Begin() + relocation.offset;
1920     DCHECK_GE(dest, image_info.image_->Begin() + image_info.image_end_);
1921     DCHECK(!IsInBootImage(pair.first));
1922     switch (relocation.type) {
1923       case kNativeObjectRelocationTypeArtField: {
1924         memcpy(dest, pair.first, sizeof(ArtField));
1925         CopyReference(
1926             reinterpret_cast<ArtField*>(dest)->GetDeclaringClassAddressWithoutBarrier(),
1927             reinterpret_cast<ArtField*>(pair.first)->GetDeclaringClass().Ptr());
1928         break;
1929       }
1930       case kNativeObjectRelocationTypeRuntimeMethod:
1931       case kNativeObjectRelocationTypeArtMethodClean:
1932       case kNativeObjectRelocationTypeArtMethodDirty: {
1933         CopyAndFixupMethod(reinterpret_cast<ArtMethod*>(pair.first),
1934                            reinterpret_cast<ArtMethod*>(dest),
1935                            image_info);
1936         break;
1937       }
1938       // For arrays, copy just the header since the elements will get copied by their corresponding
1939       // relocations.
1940       case kNativeObjectRelocationTypeArtFieldArray: {
1941         memcpy(dest, pair.first, LengthPrefixedArray<ArtField>::ComputeSize(0));
1942         break;
1943       }
1944       case kNativeObjectRelocationTypeArtMethodArrayClean:
1945       case kNativeObjectRelocationTypeArtMethodArrayDirty: {
1946         size_t size = ArtMethod::Size(target_ptr_size_);
1947         size_t alignment = ArtMethod::Alignment(target_ptr_size_);
1948         memcpy(dest, pair.first, LengthPrefixedArray<ArtMethod>::ComputeSize(0, size, alignment));
1949         // Clear padding to avoid non-deterministic data in the image (and placate valgrind).
1950         reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(dest)->ClearPadding(size, alignment);
1951         break;
1952       }
1953       case kNativeObjectRelocationTypeDexCacheArray:
1954         // Nothing to copy here, everything is done in FixupDexCache().
1955         break;
1956       case kNativeObjectRelocationTypeIMTable: {
1957         ImTable* orig_imt = reinterpret_cast<ImTable*>(pair.first);
1958         ImTable* dest_imt = reinterpret_cast<ImTable*>(dest);
1959         CopyAndFixupImTable(orig_imt, dest_imt);
1960         break;
1961       }
1962       case kNativeObjectRelocationTypeIMTConflictTable: {
1963         auto* orig_table = reinterpret_cast<ImtConflictTable*>(pair.first);
1964         CopyAndFixupImtConflictTable(
1965             orig_table,
1966             new(dest)ImtConflictTable(orig_table->NumEntries(target_ptr_size_), target_ptr_size_));
1967         break;
1968       }
1969     }
1970   }
1971   // Fixup the image method roots.
1972   auto* image_header = reinterpret_cast<ImageHeader*>(image_info.image_->Begin());
1973   for (size_t i = 0; i < ImageHeader::kImageMethodsCount; ++i) {
1974     ArtMethod* method = image_methods_[i];
1975     CHECK(method != nullptr);
1976     if (!IsInBootImage(method)) {
1977       method = NativeLocationInImage(method);
1978     }
1979     image_header->SetImageMethod(static_cast<ImageHeader::ImageMethod>(i), method);
1980   }
1981   FixupRootVisitor root_visitor(this);
1982 
1983   // Write the intern table into the image.
1984   if (image_info.intern_table_bytes_ > 0) {
1985     const ImageSection& intern_table_section = image_header->GetImageSection(
1986         ImageHeader::kSectionInternedStrings);
1987     InternTable* const intern_table = image_info.intern_table_.get();
1988     uint8_t* const intern_table_memory_ptr =
1989         image_info.image_->Begin() + intern_table_section.Offset();
1990     const size_t intern_table_bytes = intern_table->WriteToMemory(intern_table_memory_ptr);
1991     CHECK_EQ(intern_table_bytes, image_info.intern_table_bytes_);
1992     // Fixup the pointers in the newly written intern table to contain image addresses.
1993     InternTable temp_intern_table;
1994     // Note that we require that AddTableFromMemory does not make an internal copy of the
1995     // elements so that VisitRoots() will update the memory directly rather than the copies.
1996     // This also relies on visit roots not doing any verification which could fail after we update
1997     // the roots to be the image addresses.
1998     temp_intern_table.AddTableFromMemory(intern_table_memory_ptr);
1999     CHECK_EQ(temp_intern_table.Size(), intern_table->Size());
2000     temp_intern_table.VisitRoots(&root_visitor, kVisitRootFlagAllRoots);
2001   }
2002   // Write the class table(s) into the image. class_table_bytes_ may be 0 if there are multiple
2003   // class loaders. Writing multiple class tables into the image is currently unsupported.
2004   if (image_info.class_table_bytes_ > 0u) {
2005     const ImageSection& class_table_section = image_header->GetImageSection(
2006         ImageHeader::kSectionClassTable);
2007     uint8_t* const class_table_memory_ptr =
2008         image_info.image_->Begin() + class_table_section.Offset();
2009     ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
2010 
2011     ClassTable* table = image_info.class_table_.get();
2012     CHECK(table != nullptr);
2013     const size_t class_table_bytes = table->WriteToMemory(class_table_memory_ptr);
2014     CHECK_EQ(class_table_bytes, image_info.class_table_bytes_);
2015     // Fixup the pointers in the newly written class table to contain image addresses. See
2016     // above comment for intern tables.
2017     ClassTable temp_class_table;
2018     temp_class_table.ReadFromMemory(class_table_memory_ptr);
2019     CHECK_EQ(temp_class_table.NumReferencedZygoteClasses(),
2020              table->NumReferencedNonZygoteClasses() + table->NumReferencedZygoteClasses());
2021     UnbufferedRootVisitor visitor(&root_visitor, RootInfo(kRootUnknown));
2022     temp_class_table.VisitRoots(visitor);
2023   }
2024 }
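// Both tables above use the same write-then-fixup idiom: serialize the table into
// the image buffer, wrap the serialized bytes in a temporary table that aliases
// that memory, and visit its roots so that FixupRootVisitor rewrites each pointer
// to its image address in place. Condensed (names illustrative; see the real calls
// above, and the aliasing contract the comments describe):
//
//   table->WriteToMemory(dest);      // Raw runtime pointers land in |dest|.
//   TempTable temp;
//   temp.AddTableFromMemory(dest);   // Or ReadFromMemory() for the class table;
//                                    // must alias |dest|, not copy it.
//   temp.VisitRoots(&root_visitor);  // Rewrites the pointers inside |dest|.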
2025 
2026 void ImageWriter::CopyAndFixupObjects() {
2027   gc::Heap* heap = Runtime::Current()->GetHeap();
2028   heap->VisitObjects(CopyAndFixupObjectsCallback, this);
2029   // Fix up the objects that previously had hash codes.
2030   for (const auto& hash_pair : saved_hashcode_map_) {
2031     Object* obj = hash_pair.first;
2032     DCHECK_EQ(obj->GetLockWord<kVerifyNone>(false).ReadBarrierState(), 0U);
2033     obj->SetLockWord<kVerifyNone>(LockWord::FromHashCode(hash_pair.second, 0U), false);
2034   }
2035   saved_hashcode_map_.clear();
2036 }
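// Restoring the saved hash codes keeps Object.hashCode() stable for the source
// heap: while bin slots and offsets were stored in the lock words, any identity
// hash that lived there would otherwise have been lost. CopyAndFixupObject() writes
// the same saved hashes into the image copies.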
2037 
2038 void ImageWriter::CopyAndFixupObjectsCallback(Object* obj, void* arg) {
2039   DCHECK(obj != nullptr);
2040   DCHECK(arg != nullptr);
2041   reinterpret_cast<ImageWriter*>(arg)->CopyAndFixupObject(obj);
2042 }
2043 
2044 void ImageWriter::FixupPointerArray(mirror::Object* dst,
2045                                     mirror::PointerArray* arr,
2046                                     mirror::Class* klass,
2047                                     Bin array_type) {
2048   CHECK(klass->IsArrayClass());
2049   CHECK(arr->IsIntArray() || arr->IsLongArray()) << klass->PrettyClass() << " " << arr;
2050   // Fixup int and long pointers for the ArtMethod or ArtField arrays.
2051   const size_t num_elements = arr->GetLength();
2052   dst->SetClass(GetImageAddress(arr->GetClass()));
2053   auto* dest_array = down_cast<mirror::PointerArray*>(dst);
2054   for (size_t i = 0, count = num_elements; i < count; ++i) {
2055     void* elem = arr->GetElementPtrSize<void*>(i, target_ptr_size_);
2056     if (kIsDebugBuild && elem != nullptr && !IsInBootImage(elem)) {
2057       auto it = native_object_relocations_.find(elem);
2058       if (UNLIKELY(it == native_object_relocations_.end())) {
2059         if (array_type != kBinArtField) {  // Do not dereference |it|: it is end() here.
2060           auto* method = reinterpret_cast<ArtMethod*>(elem);
2061           LOG(FATAL) << "No relocation entry for ArtMethod " << method->PrettyMethod() << " @ "
2062                      << method << " idx=" << i << "/" << num_elements << " with declaring class "
2063                      << Class::PrettyClass(method->GetDeclaringClass());
2064         } else {
2065           CHECK_EQ(array_type, kBinArtField);
2066           auto* field = reinterpret_cast<ArtField*>(elem);
2067           LOG(FATAL) << "No relocation entry for ArtField " << field->PrettyField() << " @ "
2068               << field << " idx=" << i << "/" << num_elements << " with declaring class "
2069               << Class::PrettyClass(field->GetDeclaringClass());
2070         }
2071         UNREACHABLE();
2072       }
2073     }
2074     CopyAndFixupPointer(dest_array->ElementAddress(i, target_ptr_size_), elem);
2075   }
2076 }
2077 
2078 void ImageWriter::CopyAndFixupObject(Object* obj) {
2079   if (IsInBootImage(obj)) {
2080     return;
2081   }
2082   size_t offset = GetImageOffset(obj);
2083   size_t oat_index = GetOatIndex(obj);
2084   ImageInfo& image_info = GetImageInfo(oat_index);
2085   auto* dst = reinterpret_cast<Object*>(image_info.image_->Begin() + offset);
2086   DCHECK_LT(offset, image_info.image_end_);
2087   const auto* src = reinterpret_cast<const uint8_t*>(obj);
2088 
2089   image_info.image_bitmap_->Set(dst);  // Mark the obj as live.
2090 
2091   const size_t n = obj->SizeOf();
2092   DCHECK_LE(offset + n, image_info.image_->Size());
2093   memcpy(dst, src, n);
2094 
2095   // Write in the hash code of objects that had inflated monitors or a hash code in their lock
2096   // word.
2097   const auto it = saved_hashcode_map_.find(obj);
2098   dst->SetLockWord(it != saved_hashcode_map_.end() ?
2099       LockWord::FromHashCode(it->second, 0u) : LockWord::Default(), false);
2100   if (kUseBakerReadBarrier && gc::collector::ConcurrentCopying::kGrayDirtyImmuneObjects) {
2101     // Treat all of the objects in the image as marked to avoid unnecessary dirty pages. This is
2102     // safe since we mark all of the objects that may reference non immune objects as gray.
2103     CHECK(dst->AtomicSetMarkBit(0, 1));
2104   }
2105   FixupObject(obj, dst);
2106 }
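// The destination addresses are pure offset arithmetic: the bytes are written at
// image_->Begin() + offset in the writer's buffer and will live at
// image_begin_ + offset once the image is mapped. For example (illustrative numbers
// only), offset 0x1000 in an image mapped at 0x70000000 ends up at 0x70001000.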
2107 
2108 // Rewrite all the references in the copied object to point to their image address equivalent
2109 class ImageWriter::FixupVisitor {
2110  public:
2111   FixupVisitor(ImageWriter* image_writer, Object* copy) : image_writer_(image_writer), copy_(copy) {
2112   }
2113 
2114   // Ignore class roots since we don't have a way to map them to the destination. These are handled
2115   // with other logic.
2116   void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
2117       const {}
2118   void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}
2119 
2120 
2121   void operator()(ObjPtr<Object> obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
2122       REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
2123     ObjPtr<Object> ref = obj->GetFieldObject<Object, kVerifyNone>(offset);
2124     // Copy the reference and record the fixup if necessary.
2125     image_writer_->CopyReference(
2126         copy_->GetFieldObjectReferenceAddr<kVerifyNone>(offset),
2127         ref.Ptr());
2128   }
2129 
2130   // java.lang.ref.Reference visitor.
2131   void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
2132                   ObjPtr<mirror::Reference> ref) const
2133       REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
2134     operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false);
2135   }
2136 
2137  protected:
2138   ImageWriter* const image_writer_;
2139   mirror::Object* const copy_;
2140 };
2141 
2142 class ImageWriter::FixupClassVisitor FINAL : public FixupVisitor {
2143  public:
2144   FixupClassVisitor(ImageWriter* image_writer, Object* copy) : FixupVisitor(image_writer, copy) {
2145   }
2146 
2147   void operator()(ObjPtr<Object> obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
2148       REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
2149     DCHECK(obj->IsClass());
2150     FixupVisitor::operator()(obj, offset, /*is_static*/false);
2151   }
2152 
2153   void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
2154                   ObjPtr<mirror::Reference> ref ATTRIBUTE_UNUSED) const
2155       REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
2156     LOG(FATAL) << "Reference not expected here.";
2157   }
2158 };
2159 
2160 uintptr_t ImageWriter::NativeOffsetInImage(void* obj) {
2161   DCHECK(obj != nullptr);
2162   DCHECK(!IsInBootImage(obj));
2163   auto it = native_object_relocations_.find(obj);
2164   CHECK(it != native_object_relocations_.end()) << obj << " spaces "
2165       << Runtime::Current()->GetHeap()->DumpSpaces();
2166   const NativeObjectRelocation& relocation = it->second;
2167   return relocation.offset;
2168 }
2169 
2170 template <typename T>
2171 std::string PrettyPrint(T* ptr) REQUIRES_SHARED(Locks::mutator_lock_) {
2172   std::ostringstream oss;
2173   oss << ptr;
2174   return oss.str();
2175 }
2176 
2177 template <>
2178 std::string PrettyPrint(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
2179   return ArtMethod::PrettyMethod(method);
2180 }
2181 
2182 template <typename T>
2183 T* ImageWriter::NativeLocationInImage(T* obj) {
2184   if (obj == nullptr || IsInBootImage(obj)) {
2185     return obj;
2186   } else {
2187     auto it = native_object_relocations_.find(obj);
2188     CHECK(it != native_object_relocations_.end()) << obj << " " << PrettyPrint(obj)
2189         << " spaces " << Runtime::Current()->GetHeap()->DumpSpaces();
2190     const NativeObjectRelocation& relocation = it->second;
2191     ImageInfo& image_info = GetImageInfo(relocation.oat_index);
2192     return reinterpret_cast<T*>(image_info.image_begin_ + relocation.offset);
2193   }
2194 }
2195 
2196 template <typename T>
2197 T* ImageWriter::NativeCopyLocation(T* obj, mirror::DexCache* dex_cache) {
2198   if (obj == nullptr || IsInBootImage(obj)) {
2199     return obj;
2200   } else {
2201     size_t oat_index = GetOatIndexForDexCache(dex_cache);
2202     ImageInfo& image_info = GetImageInfo(oat_index);
2203     return reinterpret_cast<T*>(image_info.image_->Begin() + NativeOffsetInImage(obj));
2204   }
2205 }
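// The two helpers above answer different questions: NativeLocationInImage() yields
// the address the object will have at runtime (image_begin_ + offset), which is
// what gets written into pointer fields, while NativeCopyLocation() yields where
// the copy lives in the writer's own buffer (image_->Begin() + offset), which is
// where fixups must write right now. FixupDexCache() below uses both, one for the
// field value and one for the memory to patch.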
2206 
2207 class ImageWriter::NativeLocationVisitor {
2208  public:
2209   explicit NativeLocationVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {}
2210 
2211   template <typename T>
2212   T* operator()(T* ptr, void** dest_addr = nullptr) const REQUIRES_SHARED(Locks::mutator_lock_) {
2213     if (dest_addr != nullptr) {
2214       image_writer_->CopyAndFixupPointer(dest_addr, ptr);
2215     }
2216     return image_writer_->NativeLocationInImage(ptr);
2217   }
2218 
2219  private:
2220   ImageWriter* const image_writer_;
2221 };
2222 
2223 void ImageWriter::FixupClass(mirror::Class* orig, mirror::Class* copy) {
2224   orig->FixupNativePointers(copy, target_ptr_size_, NativeLocationVisitor(this));
2225   FixupClassVisitor visitor(this, copy);
2226   ObjPtr<mirror::Object>(orig)->VisitReferences(visitor, visitor);
2227 
2228   // Remove the clinitThreadId. This is required for image determinism.
2229   copy->SetClinitThreadId(static_cast<pid_t>(0));
2230 }
2231 
2232 void ImageWriter::FixupObject(Object* orig, Object* copy) {
2233   DCHECK(orig != nullptr);
2234   DCHECK(copy != nullptr);
2235   if (kUseBakerReadBarrier) {
2236     orig->AssertReadBarrierState();
2237   }
2238   auto* klass = orig->GetClass();
2239   if (klass->IsIntArrayClass() || klass->IsLongArrayClass()) {
2240     // Is this a native pointer array?
2241     auto it = pointer_arrays_.find(down_cast<mirror::PointerArray*>(orig));
2242     if (it != pointer_arrays_.end()) {
2243       // Should only need to fixup every pointer array exactly once.
2244       FixupPointerArray(copy, down_cast<mirror::PointerArray*>(orig), klass, it->second);
2245       pointer_arrays_.erase(it);
2246       return;
2247     }
2248   }
2249   if (orig->IsClass()) {
2250     FixupClass(orig->AsClass<kVerifyNone>(), down_cast<mirror::Class*>(copy));
2251   } else {
2252     if (klass == mirror::Method::StaticClass() || klass == mirror::Constructor::StaticClass()) {
2253       // Need to go update the ArtMethod.
2254       auto* dest = down_cast<mirror::Executable*>(copy);
2255       auto* src = down_cast<mirror::Executable*>(orig);
2256       ArtMethod* src_method = src->GetArtMethod();
2257       dest->SetArtMethod(GetImageMethodAddress(src_method));
2258     } else if (!klass->IsArrayClass()) {
2259       ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
2260       if (klass == class_linker->GetClassRoot(ClassLinker::kJavaLangDexCache)) {
2261         FixupDexCache(down_cast<mirror::DexCache*>(orig), down_cast<mirror::DexCache*>(copy));
2262       } else if (klass->IsClassLoaderClass()) {
2263         mirror::ClassLoader* copy_loader = down_cast<mirror::ClassLoader*>(copy);
2264         // If src is a ClassLoader, set the class table to null so that it gets recreated by the
2265         // ClassLoader.
2266         copy_loader->SetClassTable(nullptr);
2267         // Also set allocator to null to be safe. The allocator is created when we create the class
2268         // table. We also never expect to unload things in the image since they are held live as
2269         // roots.
2270         copy_loader->SetAllocator(nullptr);
2271       }
2272     }
2273     FixupVisitor visitor(this, copy);
2274     orig->VisitReferences(visitor, visitor);
2275   }
2276 }
2277 
2278 class ImageWriter::ImageAddressVisitorForDexCacheArray {
2279  public:
2280   explicit ImageAddressVisitorForDexCacheArray(ImageWriter* image_writer)
2281       : image_writer_(image_writer) {}
2282 
2283   template <typename T>
operator ()(T * ptr) const2284   T* operator()(T* ptr) const REQUIRES_SHARED(Locks::mutator_lock_) {
2285     return image_writer_->GetImageAddress(ptr);
2286   }
2287 
2288  private:
2289   ImageWriter* const image_writer_;
2290 };
2291 
void ImageWriter::FixupDexCache(mirror::DexCache* orig_dex_cache,
                                mirror::DexCache* copy_dex_cache) {
  ImageAddressVisitorForDexCacheArray fixup_visitor(this);
  // Though the DexCache array fields are usually treated as native pointers, we set the full
  // 64-bit values here, clearing the top 32 bits for 32-bit targets. The zero-extension is
  // done by casting to the unsigned type uintptr_t before casting to int64_t, i.e.
  //     static_cast<int64_t>(reinterpret_cast<uintptr_t>(image_begin_ + offset)).
  mirror::StringDexCacheType* orig_strings = orig_dex_cache->GetStrings();
  if (orig_strings != nullptr) {
    copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::StringsOffset(),
                                               NativeLocationInImage(orig_strings),
                                               PointerSize::k64);
    orig_dex_cache->FixupStrings(NativeCopyLocation(orig_strings, orig_dex_cache), fixup_visitor);
  }
  mirror::TypeDexCacheType* orig_types = orig_dex_cache->GetResolvedTypes();
  if (orig_types != nullptr) {
    copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedTypesOffset(),
                                               NativeLocationInImage(orig_types),
                                               PointerSize::k64);
    orig_dex_cache->FixupResolvedTypes(NativeCopyLocation(orig_types, orig_dex_cache),
                                       fixup_visitor);
  }
  ArtMethod** orig_methods = orig_dex_cache->GetResolvedMethods();
  if (orig_methods != nullptr) {
    copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedMethodsOffset(),
                                               NativeLocationInImage(orig_methods),
                                               PointerSize::k64);
    ArtMethod** copy_methods = NativeCopyLocation(orig_methods, orig_dex_cache);
    for (size_t i = 0, num = orig_dex_cache->NumResolvedMethods(); i != num; ++i) {
      ArtMethod* orig = mirror::DexCache::GetElementPtrSize(orig_methods, i, target_ptr_size_);
      // NativeLocationInImage also handles runtime methods since these have relocation info.
      ArtMethod* copy = NativeLocationInImage(orig);
      mirror::DexCache::SetElementPtrSize(copy_methods, i, copy, target_ptr_size_);
    }
  }
  mirror::FieldDexCacheType* orig_fields = orig_dex_cache->GetResolvedFields();
  if (orig_fields != nullptr) {
    copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedFieldsOffset(),
                                               NativeLocationInImage(orig_fields),
                                               PointerSize::k64);
    mirror::FieldDexCacheType* copy_fields = NativeCopyLocation(orig_fields, orig_dex_cache);
    for (size_t i = 0, num = orig_dex_cache->NumResolvedFields(); i != num; ++i) {
      mirror::FieldDexCachePair orig =
          mirror::DexCache::GetNativePairPtrSize(orig_fields, i, target_ptr_size_);
      mirror::FieldDexCachePair copy = orig;
      copy.object = NativeLocationInImage(orig.object);
      mirror::DexCache::SetNativePairPtrSize(copy_fields, i, copy, target_ptr_size_);
    }
  }
  mirror::MethodTypeDexCacheType* orig_method_types = orig_dex_cache->GetResolvedMethodTypes();
  if (orig_method_types != nullptr) {
    copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedMethodTypesOffset(),
                                               NativeLocationInImage(orig_method_types),
                                               PointerSize::k64);
    orig_dex_cache->FixupResolvedMethodTypes(NativeCopyLocation(orig_method_types, orig_dex_cache),
                                             fixup_visitor);
  }
  GcRoot<mirror::CallSite>* orig_call_sites = orig_dex_cache->GetResolvedCallSites();
  if (orig_call_sites != nullptr) {
    copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedCallSitesOffset(),
                                               NativeLocationInImage(orig_call_sites),
                                               PointerSize::k64);
    orig_dex_cache->FixupResolvedCallSites(NativeCopyLocation(orig_call_sites, orig_dex_cache),
                                           fixup_visitor);
  }

  // Remove the DexFile pointers. They will be fixed up when the runtime loads the oat file.
  // Leaving compiler pointers in here will make the output non-deterministic.
  copy_dex_cache->SetDexFile(nullptr);
}

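// Resolve a trampoline/stub address. App images reuse the stubs of the primary
// boot image's oat file; boot images resolve against the offsets recorded for
// image 0 in UpdateOatFileHeader().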
const uint8_t* ImageWriter::GetOatAddress(OatAddress type) const {
  DCHECK_LT(type, kOatAddressCount);
  // If we are compiling an app image, we need to use the stubs of the boot image.
  if (compile_app_image_) {
    // Use the current image pointers.
    const std::vector<gc::space::ImageSpace*>& image_spaces =
        Runtime::Current()->GetHeap()->GetBootImageSpaces();
    DCHECK(!image_spaces.empty());
    const OatFile* oat_file = image_spaces[0]->GetOatFile();
    CHECK(oat_file != nullptr);
    const OatHeader& header = oat_file->GetOatHeader();
    switch (type) {
      // TODO: We could maybe clean this up if we stored them in an array in the oat header.
      case kOatAddressQuickGenericJNITrampoline:
        return static_cast<const uint8_t*>(header.GetQuickGenericJniTrampoline());
      case kOatAddressInterpreterToInterpreterBridge:
        return static_cast<const uint8_t*>(header.GetInterpreterToInterpreterBridge());
      case kOatAddressInterpreterToCompiledCodeBridge:
        return static_cast<const uint8_t*>(header.GetInterpreterToCompiledCodeBridge());
      case kOatAddressJNIDlsymLookup:
        return static_cast<const uint8_t*>(header.GetJniDlsymLookup());
      case kOatAddressQuickIMTConflictTrampoline:
        return static_cast<const uint8_t*>(header.GetQuickImtConflictTrampoline());
      case kOatAddressQuickResolutionTrampoline:
        return static_cast<const uint8_t*>(header.GetQuickResolutionTrampoline());
      case kOatAddressQuickToInterpreterBridge:
        return static_cast<const uint8_t*>(header.GetQuickToInterpreterBridge());
      default:
        UNREACHABLE();
    }
  }
  const ImageInfo& primary_image_info = GetImageInfo(0);
  return GetOatAddressForOffset(primary_image_info.oat_address_offsets_[type], primary_image_info);
}

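// Choose the quick entrypoint for an image method:
//  - compiled code if present and safe to use directly,
//  - the generic JNI trampoline for native methods without code,
//  - the quick-to-interpreter bridge for non-native methods without code,
//  - the resolution trampoline for static methods of uninitialized classes.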
const uint8_t* ImageWriter::GetQuickCode(ArtMethod* method,
                                         const ImageInfo& image_info,
                                         bool* quick_is_interpreted) {
  DCHECK(!method->IsResolutionMethod()) << method->PrettyMethod();
  DCHECK_NE(method, Runtime::Current()->GetImtConflictMethod()) << method->PrettyMethod();
  DCHECK(!method->IsImtUnimplementedMethod()) << method->PrettyMethod();
  DCHECK(method->IsInvokable()) << method->PrettyMethod();
  DCHECK(!IsInBootImage(method)) << method->PrettyMethod();

  // Use original code if it exists. Otherwise, set the code pointer to the resolution
  // trampoline.

  // Quick entrypoint:
  const void* quick_oat_entry_point =
      method->GetEntryPointFromQuickCompiledCodePtrSize(target_ptr_size_);
  const uint8_t* quick_code;

  if (UNLIKELY(IsInBootImage(method->GetDeclaringClass()))) {
    DCHECK(method->IsCopied());
    // The code is not in the oat file corresponding to this image (e.g. for copied default
    // methods), so use the entry point stored in the method as-is.
    quick_code = reinterpret_cast<const uint8_t*>(quick_oat_entry_point);
  } else {
    uint32_t quick_oat_code_offset = PointerToLowMemUInt32(quick_oat_entry_point);
    quick_code = GetOatAddressForOffset(quick_oat_code_offset, image_info);
  }

  *quick_is_interpreted = false;
  if (quick_code != nullptr && (!method->IsStatic() || method->IsConstructor() ||
      method->GetDeclaringClass()->IsInitialized())) {
    // We have code for a non-static or initialized method, just use the code.
  } else if (quick_code == nullptr && method->IsNative() &&
      (!method->IsStatic() || method->GetDeclaringClass()->IsInitialized())) {
    // Non-static or initialized native method missing compiled code, use generic JNI version.
    quick_code = GetOatAddress(kOatAddressQuickGenericJNITrampoline);
  } else if (quick_code == nullptr && !method->IsNative()) {
    // We don't have code at all for a non-native method, use the interpreter.
    quick_code = GetOatAddress(kOatAddressQuickToInterpreterBridge);
    *quick_is_interpreted = true;
  } else {
    CHECK(!method->GetDeclaringClass()->IsInitialized());
    // We have code for a static method, but need to go through the resolution stub for class
    // initialization.
    quick_code = GetOatAddress(kOatAddressQuickResolutionTrampoline);
  }
  if (!IsInBootOatFile(quick_code)) {
    // DCHECK_GE(quick_code, oat_data_begin_);
  }
  return quick_code;
}

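// Copy an ArtMethod into the image and rewrite its cross-references: declaring
// class, resolved-methods array, and entrypoints (trampolines for runtime and
// non-invokable methods, relocated compiled code otherwise).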
void ImageWriter::CopyAndFixupMethod(ArtMethod* orig,
                                     ArtMethod* copy,
                                     const ImageInfo& image_info) {
  if (orig->IsAbstract()) {
    // Ignore the single-implementation info for abstract methods.
    // Do this on orig instead of copy; otherwise there is a crash, because methods
    // are copied before classes.
    // TODO: handle fixup of single-implementation method for abstract method.
    orig->SetHasSingleImplementation(false);
    orig->SetSingleImplementation(
        nullptr, Runtime::Current()->GetClassLinker()->GetImagePointerSize());
  }

  memcpy(copy, orig, ArtMethod::Size(target_ptr_size_));

  CopyReference(copy->GetDeclaringClassAddressWithoutBarrier(), orig->GetDeclaringClassUnchecked());

  ArtMethod** orig_resolved_methods = orig->GetDexCacheResolvedMethods(target_ptr_size_);
  copy->SetDexCacheResolvedMethods(NativeLocationInImage(orig_resolved_methods), target_ptr_size_);

  // OatWriter replaces the code_ with an offset value. Here we re-adjust to a pointer relative to
  // oat_begin_.

  // The resolution method has a special trampoline to call.
  Runtime* runtime = Runtime::Current();
  if (orig->IsRuntimeMethod()) {
    ImtConflictTable* orig_table = orig->GetImtConflictTable(target_ptr_size_);
    if (orig_table != nullptr) {
      // Special IMT conflict method, normal IMT conflict method or unimplemented IMT method.
      copy->SetEntryPointFromQuickCompiledCodePtrSize(
          GetOatAddress(kOatAddressQuickIMTConflictTrampoline), target_ptr_size_);
      copy->SetImtConflictTable(NativeLocationInImage(orig_table), target_ptr_size_);
    } else if (UNLIKELY(orig == runtime->GetResolutionMethod())) {
      copy->SetEntryPointFromQuickCompiledCodePtrSize(
          GetOatAddress(kOatAddressQuickResolutionTrampoline), target_ptr_size_);
    } else {
      bool found_one = false;
      for (size_t i = 0; i < static_cast<size_t>(Runtime::kLastCalleeSaveType); ++i) {
        auto idx = static_cast<Runtime::CalleeSaveType>(i);
        if (runtime->HasCalleeSaveMethod(idx) && runtime->GetCalleeSaveMethod(idx) == orig) {
          found_one = true;
          break;
        }
      }
      CHECK(found_one) << "Expected to find callee save method but got " << orig->PrettyMethod();
      CHECK(copy->IsRuntimeMethod());
    }
  } else {
    // We assume all methods have code. If they currently don't, we set them to use the
    // resolution trampoline. Abstract methods never have code, so we need to make sure their
    // use results in an AbstractMethodError. We use the interpreter to achieve this.
    if (UNLIKELY(!orig->IsInvokable())) {
      copy->SetEntryPointFromQuickCompiledCodePtrSize(
          GetOatAddress(kOatAddressQuickToInterpreterBridge), target_ptr_size_);
    } else {
      bool quick_is_interpreted;
      const uint8_t* quick_code = GetQuickCode(orig, image_info, &quick_is_interpreted);
      copy->SetEntryPointFromQuickCompiledCodePtrSize(quick_code, target_ptr_size_);

      // JNI entrypoint:
      if (orig->IsNative()) {
        // The native method's pointer is set to a stub that performs the lookup via dlsym.
        // Note this is not the code_ pointer, that is handled above.
        copy->SetEntryPointFromJniPtrSize(
            GetOatAddress(kOatAddressJNIDlsymLookup), target_ptr_size_);
      }
    }
  }
}

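// Sum of the sizes of all bins strictly before |up_to| for the given image.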
size_t ImageWriter::GetBinSizeSum(ImageWriter::ImageInfo& image_info, ImageWriter::Bin up_to) const {
  DCHECK_LE(up_to, kBinSize);
  return std::accumulate(&image_info.bin_slot_sizes_[0],
                         &image_info.bin_slot_sizes_[up_to],
                         /*init*/0);
}

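// A BinSlot packs the destination bin and the byte offset within that bin into
// a single 32-bit lock word (see the static_asserts below):
//   bits [27..29] : bin   (kBinBits == 3)
//   bits [0..26]  : index, kObjectAlignment-aligned
// For example, BinSlot(kBinArtField, 0x40) stores
// (kBinArtField << kBinShift) | 0x40 as its lock word.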
ImageWriter::BinSlot::BinSlot(uint32_t lockword) : lockword_(lockword) {
  // These values may need to get updated if more bins are added to the enum Bin.
  static_assert(kBinBits == 3, "wrong number of bin bits");
  static_assert(kBinShift == 27, "wrong bin shift");
  static_assert(sizeof(BinSlot) == sizeof(LockWord), "BinSlot/LockWord must have equal sizes");

  DCHECK_LT(GetBin(), kBinSize);
  DCHECK_ALIGNED(GetIndex(), kObjectAlignment);
}

ImageWriter::BinSlot::BinSlot(Bin bin, uint32_t index)
    : BinSlot(index | (static_cast<uint32_t>(bin) << kBinShift)) {
  DCHECK_EQ(index, GetIndex());
}

ImageWriter::Bin ImageWriter::BinSlot::GetBin() const {
  return static_cast<Bin>((lockword_ & kBinMask) >> kBinShift);
}

uint32_t ImageWriter::BinSlot::GetIndex() const {
  return lockword_ & ~kBinMask;
}

ImageWriter::Bin ImageWriter::BinTypeForNativeRelocationType(NativeObjectRelocationType type) {
  switch (type) {
    case kNativeObjectRelocationTypeArtField:
    case kNativeObjectRelocationTypeArtFieldArray:
      return kBinArtField;
    case kNativeObjectRelocationTypeArtMethodClean:
    case kNativeObjectRelocationTypeArtMethodArrayClean:
      return kBinArtMethodClean;
    case kNativeObjectRelocationTypeArtMethodDirty:
    case kNativeObjectRelocationTypeArtMethodArrayDirty:
      return kBinArtMethodDirty;
    case kNativeObjectRelocationTypeDexCacheArray:
      return kBinDexCacheArray;
    case kNativeObjectRelocationTypeRuntimeMethod:
      return kBinRuntimeMethod;
    case kNativeObjectRelocationTypeIMTable:
      return kBinImTable;
    case kNativeObjectRelocationTypeIMTConflictTable:
      return kBinIMTConflictTable;
  }
  UNREACHABLE();
}

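// In multi-image mode each object and dex file was assigned an oat index up
// front; single-image compilations always use the default index.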
size_t ImageWriter::GetOatIndex(mirror::Object* obj) const {
  if (!IsMultiImage()) {
    return GetDefaultOatIndex();
  }
  auto it = oat_index_map_.find(obj);
  DCHECK(it != oat_index_map_.end()) << obj;
  return it->second;
}

size_t ImageWriter::GetOatIndexForDexFile(const DexFile* dex_file) const {
  if (!IsMultiImage()) {
    return GetDefaultOatIndex();
  }
  auto it = dex_file_oat_index_map_.find(dex_file);
  DCHECK(it != dex_file_oat_index_map_.end()) << dex_file->GetLocation();
  return it->second;
}

size_t ImageWriter::GetOatIndexForDexCache(ObjPtr<mirror::DexCache> dex_cache) const {
  return (dex_cache == nullptr)
      ? GetDefaultOatIndex()
      : GetOatIndexForDexFile(dex_cache->GetDexFile());
}

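// Record where the oat file for |oat_index| will be mapped. All images are laid
// out first and the oat files follow; oat_offset_ is relative to the end of the
// last image, and each oat file's loaded size feeds the next file's offset.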
void ImageWriter::UpdateOatFileLayout(size_t oat_index,
                                      size_t oat_loaded_size,
                                      size_t oat_data_offset,
                                      size_t oat_data_size) {
  const uint8_t* images_end = image_infos_.back().image_begin_ + image_infos_.back().image_size_;
  for (const ImageInfo& info : image_infos_) {
    DCHECK_LE(info.image_begin_ + info.image_size_, images_end);
  }
  DCHECK(images_end != nullptr);  // Image space must be ready.

  ImageInfo& cur_image_info = GetImageInfo(oat_index);
  cur_image_info.oat_file_begin_ = images_end + cur_image_info.oat_offset_;
  cur_image_info.oat_loaded_size_ = oat_loaded_size;
  cur_image_info.oat_data_begin_ = cur_image_info.oat_file_begin_ + oat_data_offset;
  cur_image_info.oat_size_ = oat_data_size;

  if (compile_app_image_) {
    CHECK_EQ(oat_filenames_.size(), 1u) << "App image should have no next image.";
    return;
  }

  // Update the oat_offset of the next image info.
  if (oat_index + 1u != oat_filenames_.size()) {
    // There is a following one.
    ImageInfo& next_image_info = GetImageInfo(oat_index + 1u);
    next_image_info.oat_offset_ = cur_image_info.oat_offset_ + oat_loaded_size;
  }
}

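// Stash the oat checksum, and for the primary oat file also the trampoline
// offsets that GetOatAddress() later resolves against image 0.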
void ImageWriter::UpdateOatFileHeader(size_t oat_index, const OatHeader& oat_header) {
  ImageInfo& cur_image_info = GetImageInfo(oat_index);
  cur_image_info.oat_checksum_ = oat_header.GetChecksum();

  if (oat_index == GetDefaultOatIndex()) {
    // Primary oat file, read the trampolines.
    cur_image_info.oat_address_offsets_[kOatAddressInterpreterToInterpreterBridge] =
        oat_header.GetInterpreterToInterpreterBridgeOffset();
    cur_image_info.oat_address_offsets_[kOatAddressInterpreterToCompiledCodeBridge] =
        oat_header.GetInterpreterToCompiledCodeBridgeOffset();
    cur_image_info.oat_address_offsets_[kOatAddressJNIDlsymLookup] =
        oat_header.GetJniDlsymLookupOffset();
    cur_image_info.oat_address_offsets_[kOatAddressQuickGenericJNITrampoline] =
        oat_header.GetQuickGenericJniTrampolineOffset();
    cur_image_info.oat_address_offsets_[kOatAddressQuickIMTConflictTrampoline] =
        oat_header.GetQuickImtConflictTrampolineOffset();
    cur_image_info.oat_address_offsets_[kOatAddressQuickResolutionTrampoline] =
        oat_header.GetQuickResolutionTrampolineOffset();
    cur_image_info.oat_address_offsets_[kOatAddressQuickToInterpreterBridge] =
        oat_header.GetQuickToInterpreterBridgeOffset();
  }
}

ImageWriter::ImageWriter(
    const CompilerDriver& compiler_driver,
    uintptr_t image_begin,
    bool compile_pic,
    bool compile_app_image,
    ImageHeader::StorageMode image_storage_mode,
    const std::vector<const char*>& oat_filenames,
    const std::unordered_map<const DexFile*, size_t>& dex_file_oat_index_map)
    : compiler_driver_(compiler_driver),
      global_image_begin_(reinterpret_cast<uint8_t*>(image_begin)),
      image_objects_offset_begin_(0),
      compile_pic_(compile_pic),
      compile_app_image_(compile_app_image),
      target_ptr_size_(InstructionSetPointerSize(compiler_driver_.GetInstructionSet())),
      image_infos_(oat_filenames.size()),
      dirty_methods_(0u),
      clean_methods_(0u),
      image_storage_mode_(image_storage_mode),
      oat_filenames_(oat_filenames),
      dex_file_oat_index_map_(dex_file_oat_index_map) {
  CHECK_NE(image_begin, 0U);
  std::fill_n(image_methods_, arraysize(image_methods_), nullptr);
  CHECK_EQ(compile_app_image, !Runtime::Current()->GetHeap()->GetBootImageSpaces().empty())
      << "Compiling a boot image should occur iff there are no boot image spaces loaded";
}

ImageWriter::ImageInfo::ImageInfo()
    : intern_table_(new InternTable),
      class_table_(new ClassTable) {}

void ImageWriter::CopyReference(mirror::HeapReference<mirror::Object>* dest,
                                ObjPtr<mirror::Object> src) {
  dest->Assign(GetImageAddress(src.Ptr()));
}

void ImageWriter::CopyReference(mirror::CompressedReference<mirror::Object>* dest,
                                ObjPtr<mirror::Object> src) {
  dest->Assign(GetImageAddress(src.Ptr()));
}

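// Write |value| to |target| translated to its image address, using the target's
// pointer width. Pointers into the boot image are already final and are written
// unchanged.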
void ImageWriter::CopyAndFixupPointer(void** target, void* value) {
  void* new_value = value;
  if (value != nullptr && !IsInBootImage(value)) {
    auto it = native_object_relocations_.find(value);
    CHECK(it != native_object_relocations_.end()) << value;
    const NativeObjectRelocation& relocation = it->second;
    ImageInfo& image_info = GetImageInfo(relocation.oat_index);
    new_value = reinterpret_cast<void*>(image_info.image_begin_ + relocation.offset);
  }
  if (target_ptr_size_ == PointerSize::k32) {
    *reinterpret_cast<uint32_t*>(target) = PointerToLowMemUInt32(new_value);
  } else {
    *reinterpret_cast<uint64_t*>(target) = reinterpret_cast<uintptr_t>(new_value);
  }
}


}  // namespace art