/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "image_writer.h"

#include <lz4.h>
#include <lz4hc.h>
#include <sys/stat.h>

#include <memory>
#include <numeric>
#include <unordered_set>
#include <vector>

#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/callee_save_type.h"
#include "base/enums.h"
#include "base/logging.h"  // For VLOG.
#include "base/unix_file/fd_file.h"
#include "class_linker-inl.h"
#include "compiled_method.h"
#include "dex/dex_file-inl.h"
#include "dex/dex_file_types.h"
#include "driver/compiler_driver.h"
#include "elf_file.h"
#include "elf_utils.h"
#include "gc/accounting/card_table-inl.h"
#include "gc/accounting/heap_bitmap.h"
#include "gc/accounting/space_bitmap-inl.h"
#include "gc/collector/concurrent_copying.h"
#include "gc/heap-visit-objects-inl.h"
#include "gc/heap.h"
#include "gc/space/large_object_space.h"
#include "gc/space/space-inl.h"
#include "gc/verification.h"
#include "globals.h"
#include "handle_scope-inl.h"
#include "image.h"
#include "imt_conflict_table.h"
#include "subtype_check.h"
#include "jni_internal.h"
#include "linear_alloc.h"
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "mirror/class_ext.h"
#include "mirror/class_loader.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/dex_cache.h"
#include "mirror/executable.h"
#include "mirror/method.h"
#include "mirror/object-inl.h"
#include "mirror/object-refvisitor-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/string-inl.h"
#include "oat.h"
#include "oat_file.h"
#include "oat_file_manager.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "utils/dex_cache_arrays_layout-inl.h"
#include "well_known_classes.h"

using ::art::mirror::Class;
using ::art::mirror::DexCache;
using ::art::mirror::Object;
using ::art::mirror::ObjectArray;
using ::art::mirror::String;

namespace art {
namespace linker {

// Separate objects into multiple bins to optimize dirty memory use.
static constexpr bool kBinObjects = true;

// Return true if an object is already in an image space.
bool ImageWriter::IsInBootImage(const void* obj) const {
  gc::Heap* const heap = Runtime::Current()->GetHeap();
  if (!compile_app_image_) {
    DCHECK(heap->GetBootImageSpaces().empty());
    return false;
  }
  for (gc::space::ImageSpace* boot_image_space : heap->GetBootImageSpaces()) {
    const uint8_t* image_begin = boot_image_space->Begin();
    // Real image end including ArtMethods and ArtField sections.
    const uint8_t* image_end = image_begin + boot_image_space->GetImageHeader().GetImageSize();
    if (image_begin <= obj && obj < image_end) {
      return true;
    }
  }
  return false;
}

bool ImageWriter::IsInBootOatFile(const void* ptr) const {
  gc::Heap* const heap = Runtime::Current()->GetHeap();
  if (!compile_app_image_) {
    DCHECK(heap->GetBootImageSpaces().empty());
    return false;
  }
  for (gc::space::ImageSpace* boot_image_space : heap->GetBootImageSpaces()) {
    const ImageHeader& image_header = boot_image_space->GetImageHeader();
    if (image_header.GetOatFileBegin() <= ptr && ptr < image_header.GetOatFileEnd()) {
      return true;
    }
  }
  return false;
}

static void ClearDexFileCookies() REQUIRES_SHARED(Locks::mutator_lock_) {
  auto visitor = [](Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(obj != nullptr);
    Class* klass = obj->GetClass();
    if (klass == WellKnownClasses::ToClass(WellKnownClasses::dalvik_system_DexFile)) {
      ArtField* field = jni::DecodeArtField(WellKnownClasses::dalvik_system_DexFile_cookie);
      // Null out the cookie to enable determinism. b/34090128
      field->SetObject</*kTransactionActive*/false>(obj, nullptr);
    }
  };
  Runtime::Current()->GetHeap()->VisitObjects(visitor);
}

bool ImageWriter::PrepareImageAddressSpace() {
  target_ptr_size_ = InstructionSetPointerSize(compiler_driver_.GetInstructionSet());
  gc::Heap* const heap = Runtime::Current()->GetHeap();
  {
    ScopedObjectAccess soa(Thread::Current());
    PruneNonImageClasses();  // Remove junk
    if (compile_app_image_) {
      // Clear dex file cookies for app images to enable app image determinism. This is required
      // since the cookie field contains long pointers to DexFiles which are not deterministic.
      // b/34090128
      ClearDexFileCookies();
    } else {
      // Avoid for app image since this may increase RAM and image size.
      ComputeLazyFieldsForImageClasses();  // Add useful information
    }
  }
  heap->CollectGarbage(/* clear_soft_references */ false);  // Remove garbage.

  if (kIsDebugBuild) {
    ScopedObjectAccess soa(Thread::Current());
    CheckNonImageClassesRemoved();
  }

  {
    ScopedObjectAccess soa(Thread::Current());
    CalculateNewObjectOffsets();
  }

  // This needs to happen after CalculateNewObjectOffsets since it relies on intern_table_bytes_ and
  // bin size sums being calculated.
  if (!AllocMemory()) {
    return false;
  }

  return true;
}

bool ImageWriter::Write(int image_fd,
                        const std::vector<const char*>& image_filenames,
                        const std::vector<const char*>& oat_filenames) {
  // If image_fd or oat_fd are not kInvalidFd then we may have empty strings in image_filenames or
  // oat_filenames.
  CHECK(!image_filenames.empty());
  if (image_fd != kInvalidFd) {
    CHECK_EQ(image_filenames.size(), 1u);
  }
  CHECK(!oat_filenames.empty());
  CHECK_EQ(image_filenames.size(), oat_filenames.size());

  {
    ScopedObjectAccess soa(Thread::Current());
    for (size_t i = 0; i < oat_filenames.size(); ++i) {
      CreateHeader(i);
      CopyAndFixupNativeData(i);
    }
  }

  {
    // TODO: heap validation can't handle these fix up passes.
    ScopedObjectAccess soa(Thread::Current());
    Runtime::Current()->GetHeap()->DisableObjectValidation();
    CopyAndFixupObjects();
  }

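  // Write each image file: the image data (possibly compressed) first, then the bitmap, and the
  // header last, so that a partially written file is never mistaken for a valid image.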
  for (size_t i = 0; i < image_filenames.size(); ++i) {
    const char* image_filename = image_filenames[i];
    ImageInfo& image_info = GetImageInfo(i);
    std::unique_ptr<File> image_file;
    if (image_fd != kInvalidFd) {
      if (strlen(image_filename) == 0u) {
        image_file.reset(new File(image_fd, unix_file::kCheckSafeUsage));
        // Empty the file in case it already exists.
        if (image_file != nullptr) {
          TEMP_FAILURE_RETRY(image_file->SetLength(0));
          TEMP_FAILURE_RETRY(image_file->Flush());
        }
      } else {
        LOG(ERROR) << "image fd " << image_fd << " name " << image_filename;
      }
    } else {
      image_file.reset(OS::CreateEmptyFile(image_filename));
    }

    if (image_file == nullptr) {
      LOG(ERROR) << "Failed to open image file " << image_filename;
      return false;
    }

    if (!compile_app_image_ && fchmod(image_file->Fd(), 0644) != 0) {
      PLOG(ERROR) << "Failed to make image file world readable: " << image_filename;
      image_file->Erase();
      return false;
    }

    std::unique_ptr<char[]> compressed_data;
    // Image data size excludes the bitmap and the header.
    ImageHeader* const image_header = reinterpret_cast<ImageHeader*>(image_info.image_->Begin());
    const size_t image_data_size = image_header->GetImageSize() - sizeof(ImageHeader);
    char* image_data = reinterpret_cast<char*>(image_info.image_->Begin()) + sizeof(ImageHeader);
    size_t data_size;
    const char* image_data_to_write;
    const uint64_t compress_start_time = NanoTime();

    CHECK_EQ(image_header->storage_mode_, image_storage_mode_);
    switch (image_storage_mode_) {
      case ImageHeader::kStorageModeLZ4HC:  // Fall-through.
      case ImageHeader::kStorageModeLZ4: {
        const size_t compressed_max_size = LZ4_compressBound(image_data_size);
        compressed_data.reset(new char[compressed_max_size]);
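        // LZ4_compress_default() returns the number of bytes written to the destination
        // buffer, or 0 if the compression fails.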
        data_size = LZ4_compress_default(
            reinterpret_cast<char*>(image_info.image_->Begin()) + sizeof(ImageHeader),
            &compressed_data[0],
            image_data_size,
            compressed_max_size);

        break;
      }
      /*
       * Disabled due to image_test64 flakiness. Both modes use the same decompression. b/27560444
      case ImageHeader::kStorageModeLZ4HC: {
        // Bound is same as non HC.
        const size_t compressed_max_size = LZ4_compressBound(image_data_size);
        compressed_data.reset(new char[compressed_max_size]);
        data_size = LZ4_compressHC(
            reinterpret_cast<char*>(image_info.image_->Begin()) + sizeof(ImageHeader),
            &compressed_data[0],
            image_data_size);
        break;
      }
      */
      case ImageHeader::kStorageModeUncompressed: {
        data_size = image_data_size;
        image_data_to_write = image_data;
        break;
      }
      default: {
        LOG(FATAL) << "Unsupported";
        UNREACHABLE();
      }
    }

    if (compressed_data != nullptr) {
      image_data_to_write = &compressed_data[0];
      VLOG(compiler) << "Compressed from " << image_data_size << " to " << data_size << " in "
                     << PrettyDuration(NanoTime() - compress_start_time);
      if (kIsDebugBuild) {
        std::unique_ptr<uint8_t[]> temp(new uint8_t[image_data_size]);
        const size_t decompressed_size = LZ4_decompress_safe(
            reinterpret_cast<char*>(&compressed_data[0]),
            reinterpret_cast<char*>(&temp[0]),
            data_size,
            image_data_size);
        CHECK_EQ(decompressed_size, image_data_size);
        CHECK_EQ(memcmp(image_data, &temp[0], image_data_size), 0) << image_storage_mode_;
      }
    }

    // Write out the image + fields + methods.
    const bool is_compressed = compressed_data != nullptr;
    if (!image_file->PwriteFully(image_data_to_write, data_size, sizeof(ImageHeader))) {
      PLOG(ERROR) << "Failed to write image file data " << image_filename;
      image_file->Erase();
      return false;
    }

    // Write out the image bitmap at the page aligned start of the image end, also uncompressed for
    // convenience.
    const ImageSection& bitmap_section = image_header->GetImageBitmapSection();
    // Align up since data size may be unaligned if the image is compressed.
    size_t bitmap_position_in_file = RoundUp(sizeof(ImageHeader) + data_size, kPageSize);
    if (!is_compressed) {
      CHECK_EQ(bitmap_position_in_file, bitmap_section.Offset());
    }
    if (!image_file->PwriteFully(reinterpret_cast<char*>(image_info.image_bitmap_->Begin()),
                                 bitmap_section.Size(),
                                 bitmap_position_in_file)) {
      PLOG(ERROR) << "Failed to write image file " << image_filename;
      image_file->Erase();
      return false;
    }

    int err = image_file->Flush();
    if (err < 0) {
      PLOG(ERROR) << "Failed to flush image file " << image_filename << " with result " << err;
      image_file->Erase();
      return false;
    }

    // Write header last in case the compiler gets killed in the middle of image writing.
    // We do not want to have a corrupted image with a valid header.
    // The header is uncompressed since it contains whether the image is compressed or not.
    image_header->data_size_ = data_size;
    if (!image_file->PwriteFully(reinterpret_cast<char*>(image_info.image_->Begin()),
                                 sizeof(ImageHeader),
                                 0)) {
      PLOG(ERROR) << "Failed to write image file header " << image_filename;
      image_file->Erase();
      return false;
    }

    CHECK_EQ(bitmap_position_in_file + bitmap_section.Size(),
             static_cast<size_t>(image_file->GetLength()));
    if (image_file->FlushCloseOrErase() != 0) {
      PLOG(ERROR) << "Failed to flush and close image file " << image_filename;
      return false;
    }
  }
  return true;
}

void ImageWriter::SetImageOffset(mirror::Object* object, size_t offset) {
  DCHECK(object != nullptr);
  DCHECK_NE(offset, 0U);

  // The object is already deflated from when we set the bin slot. Just overwrite the lock word.
  object->SetLockWord(LockWord::FromForwardingAddress(offset), false);
  DCHECK_EQ(object->GetLockWord(false).ReadBarrierState(), 0u);
  DCHECK(IsImageOffsetAssigned(object));
}

void ImageWriter::UpdateImageOffset(mirror::Object* obj, uintptr_t offset) {
  DCHECK(IsImageOffsetAssigned(obj)) << obj << " " << offset;
  obj->SetLockWord(LockWord::FromForwardingAddress(offset), false);
  DCHECK_EQ(obj->GetLockWord(false).ReadBarrierState(), 0u);
}

void ImageWriter::AssignImageOffset(mirror::Object* object, ImageWriter::BinSlot bin_slot) {
  DCHECK(object != nullptr);
  DCHECK_NE(image_objects_offset_begin_, 0u);

  size_t oat_index = GetOatIndex(object);
  ImageInfo& image_info = GetImageInfo(oat_index);
  size_t bin_slot_offset = image_info.GetBinSlotOffset(bin_slot.GetBin());
  size_t new_offset = bin_slot_offset + bin_slot.GetIndex();
  DCHECK_ALIGNED(new_offset, kObjectAlignment);

  SetImageOffset(object, new_offset);
  DCHECK_LT(new_offset, image_info.image_end_);
}

bool ImageWriter::IsImageOffsetAssigned(mirror::Object* object) const {
  // Will also return true if the bin slot was assigned since we are reusing the lock word.
  DCHECK(object != nullptr);
  return object->GetLockWord(false).GetState() == LockWord::kForwardingAddress;
}

size_t ImageWriter::GetImageOffset(mirror::Object* object) const {
  DCHECK(object != nullptr);
  DCHECK(IsImageOffsetAssigned(object));
  LockWord lock_word = object->GetLockWord(false);
  size_t offset = lock_word.ForwardingAddress();
  size_t oat_index = GetOatIndex(object);
  const ImageInfo& image_info = GetImageInfo(oat_index);
  DCHECK_LT(offset, image_info.image_end_);
  return offset;
}

void ImageWriter::SetImageBinSlot(mirror::Object* object, BinSlot bin_slot) {
  DCHECK(object != nullptr);
  DCHECK(!IsImageOffsetAssigned(object));
  DCHECK(!IsImageBinSlotAssigned(object));

  // Before we stomp over the lock word, save the hash code for later.
  LockWord lw(object->GetLockWord(false));
  switch (lw.GetState()) {
    case LockWord::kFatLocked:
      FALLTHROUGH_INTENDED;
    case LockWord::kThinLocked: {
      std::ostringstream oss;
      bool thin = (lw.GetState() == LockWord::kThinLocked);
      oss << (thin ? "Thin" : "Fat")
          << " locked object " << object << "(" << object->PrettyTypeOf()
          << ") found during object copy";
      if (thin) {
        oss << ". Lock owner:" << lw.ThinLockOwner();
      }
      LOG(FATAL) << oss.str();
      break;
    }
    case LockWord::kUnlocked:
      // No hash, don't need to save it.
      break;
    case LockWord::kHashCode:
      DCHECK(saved_hashcode_map_.find(object) == saved_hashcode_map_.end());
      saved_hashcode_map_.emplace(object, lw.GetHashCode());
      break;
    default:
      LOG(FATAL) << "Unreachable.";
      UNREACHABLE();
  }
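  // Stash the packed bin slot in the lock word's forwarding-address state; AssignImageOffset()
  // later overwrites it with the object's real image offset.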
  object->SetLockWord(LockWord::FromForwardingAddress(bin_slot.Uint32Value()), false);
  DCHECK_EQ(object->GetLockWord(false).ReadBarrierState(), 0u);
  DCHECK(IsImageBinSlotAssigned(object));
}

void ImageWriter::PrepareDexCacheArraySlots() {
  // Prepare dex cache array starts based on the ordering specified in the CompilerDriver.
  // Set the slot size early to avoid DCHECK() failures in IsImageBinSlotAssigned()
  // when AssignImageBinSlot() assigns their indexes out of order.
  for (const DexFile* dex_file : compiler_driver_.GetDexFilesForOatFile()) {
    auto it = dex_file_oat_index_map_.find(dex_file);
    DCHECK(it != dex_file_oat_index_map_.end()) << dex_file->GetLocation();
    ImageInfo& image_info = GetImageInfo(it->second);
    image_info.dex_cache_array_starts_.Put(
        dex_file, image_info.GetBinSlotSize(Bin::kDexCacheArray));
    DexCacheArraysLayout layout(target_ptr_size_, dex_file);
    image_info.IncrementBinSlotSize(Bin::kDexCacheArray, layout.Size());
  }

  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Thread* const self = Thread::Current();
  ReaderMutexLock mu(self, *Locks::dex_lock_);
  for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
    ObjPtr<mirror::DexCache> dex_cache =
        ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
    if (dex_cache == nullptr || IsInBootImage(dex_cache.Ptr())) {
      continue;
    }
    const DexFile* dex_file = dex_cache->GetDexFile();
    CHECK(dex_file_oat_index_map_.find(dex_file) != dex_file_oat_index_map_.end())
        << "Dex cache should have been pruned " << dex_file->GetLocation()
        << "; possibly in class path";
    DexCacheArraysLayout layout(target_ptr_size_, dex_file);
    DCHECK(layout.Valid());
    size_t oat_index = GetOatIndexForDexCache(dex_cache);
    ImageInfo& image_info = GetImageInfo(oat_index);
    uint32_t start = image_info.dex_cache_array_starts_.Get(dex_file);
    DCHECK_EQ(dex_file->NumTypeIds() != 0u, dex_cache->GetResolvedTypes() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetResolvedTypes(),
                               start + layout.TypesOffset(),
                               dex_cache);
    DCHECK_EQ(dex_file->NumMethodIds() != 0u, dex_cache->GetResolvedMethods() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetResolvedMethods(),
                               start + layout.MethodsOffset(),
                               dex_cache);
    DCHECK_EQ(dex_file->NumFieldIds() != 0u, dex_cache->GetResolvedFields() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetResolvedFields(),
                               start + layout.FieldsOffset(),
                               dex_cache);
    DCHECK_EQ(dex_file->NumStringIds() != 0u, dex_cache->GetStrings() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetStrings(), start + layout.StringsOffset(), dex_cache);

    if (dex_cache->GetResolvedMethodTypes() != nullptr) {
      AddDexCacheArrayRelocation(dex_cache->GetResolvedMethodTypes(),
                                 start + layout.MethodTypesOffset(),
                                 dex_cache);
    }
    if (dex_cache->GetResolvedCallSites() != nullptr) {
      AddDexCacheArrayRelocation(dex_cache->GetResolvedCallSites(),
                                 start + layout.CallSitesOffset(),
                                 dex_cache);
    }
  }
}

void ImageWriter::AddDexCacheArrayRelocation(void* array,
                                             size_t offset,
                                             ObjPtr<mirror::DexCache> dex_cache) {
  if (array != nullptr) {
    DCHECK(!IsInBootImage(array));
    size_t oat_index = GetOatIndexForDexCache(dex_cache);
    native_object_relocations_.emplace(array,
        NativeObjectRelocation { oat_index, offset, NativeObjectRelocationType::kDexCacheArray });
  }
}

void ImageWriter::AddMethodPointerArray(mirror::PointerArray* arr) {
  DCHECK(arr != nullptr);
  if (kIsDebugBuild) {
    for (size_t i = 0, len = arr->GetLength(); i < len; i++) {
      ArtMethod* method = arr->GetElementPtrSize<ArtMethod*>(i, target_ptr_size_);
      if (method != nullptr && !method->IsRuntimeMethod()) {
        mirror::Class* klass = method->GetDeclaringClass();
        CHECK(klass == nullptr || KeepClass(klass))
            << Class::PrettyClass(klass) << " should be a kept class";
      }
    }
  }
  // Bin::kArtMethodClean picked arbitrarily, just required to differentiate between ArtFields and
  // ArtMethods.
  pointer_arrays_.emplace(arr, Bin::kArtMethodClean);
}

void ImageWriter::AssignImageBinSlot(mirror::Object* object, size_t oat_index) {
  DCHECK(object != nullptr);
  size_t object_size = object->SizeOf();

  // The magic happens here. We segregate objects into different bins based
  // on how likely they are to get dirty at runtime.
  //
  // Likely-to-dirty objects get packed together into the same bin so that
  // at runtime their page dirtiness ratio (how many dirty objects a page has) is
  // maximized.
  //
  // This means more pages will stay either clean or shared dirty (with zygote) and
  // the app will use less of its own (private) memory.
  Bin bin = Bin::kRegular;

  if (kBinObjects) {
    //
    // Changing the bin of an object is purely a memory-use tuning.
    // It has no effect on runtime correctness.
    //
    // Memory analysis has determined that the following types of objects get dirtied
    // the most:
    //
    // * Dex cache arrays are stored in a special bin. The arrays for each dex cache have
    //   a fixed layout which helps improve generated code (using PC-relative addressing),
    //   so we pre-calculate their offsets separately in PrepareDexCacheArraySlots().
    //   Since these arrays are huge, most pages do not overlap other objects and it's not
    //   really important where they are for the clean/dirty separation. Due to their
    //   special PC-relative addressing, we arbitrarily keep them at the end.
    // * Classes that are verified [their clinit runs only at runtime]
    //   - classes in general [because their static fields get overwritten]
    //   - initialized classes with all-final statics are unlikely to be ever dirty,
    //     so bin them separately
    // * Art Methods that are:
    //   - native [their native entry point is not looked up until runtime]
    //   - have declaring classes that aren't initialized
    //            [their interpreter/quick entry points are trampolines until the class
    //             becomes initialized]
    //
    // We also assume the following objects get dirtied either never or extremely rarely:
    //  * Strings (they are immutable)
    //  * Art methods that aren't native and have initialized declared classes
    //
    // We assume that "regular" bin objects are highly unlikely to become dirtied,
    // so packing them together will not result in a noticeably tighter dirty-to-clean ratio.
    //
    if (object->IsClass()) {
      bin = Bin::kClassVerified;
      mirror::Class* klass = object->AsClass();

      // Add non-embedded vtable to the pointer array table if there is one.
      auto* vtable = klass->GetVTable();
      if (vtable != nullptr) {
        AddMethodPointerArray(vtable);
      }
      auto* iftable = klass->GetIfTable();
      if (iftable != nullptr) {
        for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
          if (iftable->GetMethodArrayCount(i) > 0) {
            AddMethodPointerArray(iftable->GetMethodArray(i));
          }
        }
      }

      // Move known dirty objects into their own sections. This includes:
      //   - classes with dirty static fields.
      if (dirty_image_objects_ != nullptr &&
          dirty_image_objects_->find(klass->PrettyDescriptor()) != dirty_image_objects_->end()) {
        bin = Bin::kKnownDirty;
      } else if (klass->GetStatus() == ClassStatus::kInitialized) {
        bin = Bin::kClassInitialized;

        // If the class's static fields are all final, put it into a separate bin
        // since it's very likely it will stay clean.
        uint32_t num_static_fields = klass->NumStaticFields();
        if (num_static_fields == 0) {
          bin = Bin::kClassInitializedFinalStatics;
        } else {
          // Maybe all the statics are final?
          bool all_final = true;
          for (uint32_t i = 0; i < num_static_fields; ++i) {
            ArtField* field = klass->GetStaticField(i);
            if (!field->IsFinal()) {
              all_final = false;
              break;
            }
          }

          if (all_final) {
            bin = Bin::kClassInitializedFinalStatics;
          }
        }
      }
    } else if (object->GetClass<kVerifyNone>()->IsStringClass()) {
      bin = Bin::kString;  // Strings are almost always immutable (except for object header).
    } else if (object->GetClass<kVerifyNone>() ==
        Runtime::Current()->GetClassLinker()->GetClassRoot(ClassLinker::kJavaLangObject)) {
      // Instance of java.lang.Object, probably a lock object. This means it will be dirty when we
      // synchronize on it.
      bin = Bin::kMiscDirty;
    } else if (object->IsDexCache()) {
      // Dex file field becomes dirty when the image is loaded.
      bin = Bin::kMiscDirty;
    }
    // else bin = Bin::kRegular
  }

  // Assign the oat index too.
  DCHECK(oat_index_map_.find(object) == oat_index_map_.end());
  oat_index_map_.emplace(object, oat_index);

  ImageInfo& image_info = GetImageInfo(oat_index);

  size_t offset_delta = RoundUp(object_size, kObjectAlignment);  // 64-bit alignment
  // How many bytes the current bin is at (aligned).
  size_t current_offset = image_info.GetBinSlotSize(bin);
  // Move the current bin size up to accommodate the object we just assigned a bin slot.
  image_info.IncrementBinSlotSize(bin, offset_delta);

  BinSlot new_bin_slot(bin, current_offset);
  SetImageBinSlot(object, new_bin_slot);

  image_info.IncrementBinSlotCount(bin, 1u);

  // Grow the image closer to the end by the object we just assigned.
  image_info.image_end_ += offset_delta;
}

bool ImageWriter::WillMethodBeDirty(ArtMethod* m) const {
  if (m->IsNative()) {
    return true;
  }
  mirror::Class* declaring_class = m->GetDeclaringClass();
  // An initialized class is highly unlikely to dirty since there are no entry points to mutate.
  return declaring_class == nullptr || declaring_class->GetStatus() != ClassStatus::kInitialized;
}

bool ImageWriter::IsImageBinSlotAssigned(mirror::Object* object) const {
  DCHECK(object != nullptr);

  // We always stash the bin slot into a lockword, in the 'forwarding address' state.
  // If it's in some other state, then we haven't yet assigned an image bin slot.
  if (object->GetLockWord(false).GetState() != LockWord::kForwardingAddress) {
    return false;
  } else if (kIsDebugBuild) {
    LockWord lock_word = object->GetLockWord(false);
    size_t offset = lock_word.ForwardingAddress();
    BinSlot bin_slot(offset);
    size_t oat_index = GetOatIndex(object);
    const ImageInfo& image_info = GetImageInfo(oat_index);
    DCHECK_LT(bin_slot.GetIndex(), image_info.GetBinSlotSize(bin_slot.GetBin()))
        << "bin slot offset should not exceed the size of that bin";
  }
  return true;
}

ImageWriter::BinSlot ImageWriter::GetImageBinSlot(mirror::Object* object) const {
  DCHECK(object != nullptr);
  DCHECK(IsImageBinSlotAssigned(object));

  LockWord lock_word = object->GetLockWord(false);
  size_t offset = lock_word.ForwardingAddress();  // TODO: ForwardingAddress should be uint32_t
  DCHECK_LE(offset, std::numeric_limits<uint32_t>::max());

  BinSlot bin_slot(static_cast<uint32_t>(offset));
  size_t oat_index = GetOatIndex(object);
  const ImageInfo& image_info = GetImageInfo(oat_index);
  DCHECK_LT(bin_slot.GetIndex(), image_info.GetBinSlotSize(bin_slot.GetBin()));

  return bin_slot;
}

bool ImageWriter::AllocMemory() {
  for (ImageInfo& image_info : image_infos_) {
    ImageSection unused_sections[ImageHeader::kSectionCount];
    const size_t length = RoundUp(
        image_info.CreateImageSections(unused_sections, compile_app_image_), kPageSize);

    std::string error_msg;
    image_info.image_.reset(MemMap::MapAnonymous("image writer image",
                                                 nullptr,
                                                 length,
                                                 PROT_READ | PROT_WRITE,
                                                 false,
                                                 false,
                                                 &error_msg));
    if (UNLIKELY(image_info.image_.get() == nullptr)) {
      LOG(ERROR) << "Failed to allocate memory for image file generation: " << error_msg;
      return false;
    }

    // Create the image bitmap; it only needs to cover the mirror object section, which is up to
    // image_end_.
    CHECK_LE(image_info.image_end_, length);
    image_info.image_bitmap_.reset(gc::accounting::ContinuousSpaceBitmap::Create(
        "image bitmap", image_info.image_->Begin(), RoundUp(image_info.image_end_, kPageSize)));
    if (image_info.image_bitmap_.get() == nullptr) {
      LOG(ERROR) << "Failed to allocate memory for image bitmap";
      return false;
    }
  }
  return true;
}

class ImageWriter::ComputeLazyFieldsForClassesVisitor : public ClassVisitor {
 public:
  bool operator()(ObjPtr<Class> c) OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
    StackHandleScope<1> hs(Thread::Current());
    mirror::Class::ComputeName(hs.NewHandle(c));
    return true;
  }
};

void ImageWriter::ComputeLazyFieldsForImageClasses() {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ComputeLazyFieldsForClassesVisitor visitor;
  class_linker->VisitClassesWithoutClassesLock(&visitor);
}

static bool IsBootClassLoaderClass(ObjPtr<mirror::Class> klass)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  return klass->GetClassLoader() == nullptr;
}

bool ImageWriter::IsBootClassLoaderNonImageClass(mirror::Class* klass) {
  return IsBootClassLoaderClass(klass) && !IsInBootImage(klass);
}

// This visitor follows the references of an instance recursively, then prunes the class
// if the type of any field is pruned.
class ImageWriter::PruneObjectReferenceVisitor {
 public:
  PruneObjectReferenceVisitor(ImageWriter* image_writer,
                        bool* early_exit,
                        std::unordered_set<mirror::Object*>* visited,
                        bool* result)
      : image_writer_(image_writer), early_exit_(early_exit), visited_(visited), result_(result) {}

  ALWAYS_INLINE void VisitRootIfNonNull(
      mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const
      REQUIRES_SHARED(Locks::mutator_lock_) { }

  ALWAYS_INLINE void VisitRoot(
      mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const
      REQUIRES_SHARED(Locks::mutator_lock_) { }

  ALWAYS_INLINE void operator() (ObjPtr<mirror::Object> obj,
                                 MemberOffset offset,
                                 bool is_static ATTRIBUTE_UNUSED) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    mirror::Object* ref =
        obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset);
    if (ref == nullptr || visited_->find(ref) != visited_->end()) {
      return;
    }

    ObjPtr<mirror::Class> klass = ref->IsClass() ? ref->AsClass() : ref->GetClass();
    if (klass == mirror::Method::StaticClass() || klass == mirror::Constructor::StaticClass()) {
      // Prune all classes using reflection because the content they hold will not be fixed up.
      *result_ = true;
    }

    if (ref->IsClass()) {
      *result_ = *result_ ||
          image_writer_->PruneAppImageClassInternal(ref->AsClass(), early_exit_, visited_);
    } else {
      // Record the object visited in case of circular reference.
      visited_->emplace(ref);
      *result_ = *result_ ||
          image_writer_->PruneAppImageClassInternal(klass, early_exit_, visited_);
      ref->VisitReferences(*this, *this);
      // Clean up before exit for next call of this function.
      visited_->erase(ref);
    }
  }

  ALWAYS_INLINE void operator() (ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
                                 ObjPtr<mirror::Reference> ref) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false);
  }

  ALWAYS_INLINE bool GetResult() const {
    return *result_;
  }

 private:
  ImageWriter* image_writer_;
  bool* early_exit_;
  std::unordered_set<mirror::Object*>* visited_;
  bool* const result_;
};


bool ImageWriter::PruneAppImageClass(ObjPtr<mirror::Class> klass) {
  bool early_exit = false;
  std::unordered_set<mirror::Object*> visited;
  return PruneAppImageClassInternal(klass, &early_exit, &visited);
}

bool ImageWriter::PruneAppImageClassInternal(
    ObjPtr<mirror::Class> klass,
    bool* early_exit,
    std::unordered_set<mirror::Object*>* visited) {
  DCHECK(early_exit != nullptr);
  DCHECK(visited != nullptr);
  DCHECK(compile_app_image_);
  if (klass == nullptr || IsInBootImage(klass.Ptr())) {
    return false;
  }
  auto found = prune_class_memo_.find(klass.Ptr());
  if (found != prune_class_memo_.end()) {
    // Already computed, return the found value.
    return found->second;
  }
  // Circular dependencies, return false but do not store the result in the memoization table.
  if (visited->find(klass.Ptr()) != visited->end()) {
    *early_exit = true;
    return false;
  }
  visited->emplace(klass.Ptr());
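  // Start from the boot-class-loader test; each check below ORs in another reason to prune
  // (non-image class, erroneous class, interfaces, component type, static field types,
  // superclass, and a dex file outside the compiled set).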
  bool result = IsBootClassLoaderClass(klass);
  std::string temp;
  // Prune if not an image class; this handles any broken sets of image classes such as having a
  // class in the set but not its superclass.
  result = result || !compiler_driver_.IsImageClass(klass->GetDescriptor(&temp));
  bool my_early_exit = false;  // Only for ourselves, ignore caller.
  // Remove classes that failed to verify since we don't want to have java.lang.VerifyError in the
  // app image.
  if (klass->IsErroneous()) {
    result = true;
  } else {
    ObjPtr<mirror::ClassExt> ext(klass->GetExtData());
    CHECK(ext.IsNull() || ext->GetVerifyError() == nullptr) << klass->PrettyClass();
  }
  if (!result) {
    // Check interfaces since these won't be visited through VisitReferences.
    mirror::IfTable* if_table = klass->GetIfTable();
    for (size_t i = 0, num_interfaces = klass->GetIfTableCount(); i < num_interfaces; ++i) {
      result = result || PruneAppImageClassInternal(if_table->GetInterface(i),
                                                    &my_early_exit,
                                                    visited);
    }
  }
  if (klass->IsObjectArrayClass()) {
    result = result || PruneAppImageClassInternal(klass->GetComponentType(),
                                                  &my_early_exit,
                                                  visited);
  }
  // Check static fields and their classes.
  if (klass->IsResolved() && klass->NumReferenceStaticFields() != 0) {
    size_t num_static_fields = klass->NumReferenceStaticFields();
    // GC can presumably happen when we are cross compiling; it should not cause performance
    // problems to do pointer size logic.
    MemberOffset field_offset = klass->GetFirstReferenceStaticFieldOffset(
        Runtime::Current()->GetClassLinker()->GetImagePointerSize());
    for (size_t i = 0u; i < num_static_fields; ++i) {
      mirror::Object* ref = klass->GetFieldObject<mirror::Object>(field_offset);
      if (ref != nullptr) {
        if (ref->IsClass()) {
          result = result || PruneAppImageClassInternal(ref->AsClass(),
                                                        &my_early_exit,
                                                        visited);
        } else {
          mirror::Class* type = ref->GetClass();
          result = result || PruneAppImageClassInternal(type,
                                                        &my_early_exit,
                                                        visited);
          if (!result) {
            // For the non-class case, also go through all the types mentioned by its fields'
            // references recursively to decide whether to keep this class.
            bool tmp = false;
            PruneObjectReferenceVisitor visitor(this, &my_early_exit, visited, &tmp);
            ref->VisitReferences(visitor, visitor);
            result = result || tmp;
          }
        }
      }
      field_offset = MemberOffset(field_offset.Uint32Value() +
                                  sizeof(mirror::HeapReference<mirror::Object>));
    }
  }
  result = result || PruneAppImageClassInternal(klass->GetSuperClass(),
                                                &my_early_exit,
                                                visited);
  // Remove the class if the dex file is not in the set of dex files. This happens for classes that
  // are from uses-library if there is no profile. b/30688277
  mirror::DexCache* dex_cache = klass->GetDexCache();
  if (dex_cache != nullptr) {
    result = result ||
        dex_file_oat_index_map_.find(dex_cache->GetDexFile()) == dex_file_oat_index_map_.end();
  }
  // Erase the element we stored earlier since we are exiting the function.
  auto it = visited->find(klass.Ptr());
  DCHECK(it != visited->end());
  visited->erase(it);
  // Only store the result if it is true or none of the calls early exited due to circular
  // dependencies. If visited is empty then we are the root caller; in this case the cycle was in
  // a child call and we can remember the result.
  if (result || !my_early_exit || visited->empty()) {
    prune_class_memo_[klass.Ptr()] = result;
  }
  *early_exit |= my_early_exit;
  return result;
}

bool ImageWriter::KeepClass(ObjPtr<mirror::Class> klass) {
  if (klass == nullptr) {
    return false;
  }
  if (compile_app_image_ && Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass)) {
    // Already in boot image, return true.
    return true;
  }
  std::string temp;
  if (!compiler_driver_.IsImageClass(klass->GetDescriptor(&temp))) {
    return false;
  }
  if (compile_app_image_) {
    // For app images, we need to prune boot loader classes that are not in the boot image since
    // these may have already been loaded when the app image is loaded.
    // Keep classes in the boot image space since we don't want to re-resolve these.
    return !PruneAppImageClass(klass);
  }
  return true;
}

class ImageWriter::PruneClassesVisitor : public ClassVisitor {
 public:
  PruneClassesVisitor(ImageWriter* image_writer, ObjPtr<mirror::ClassLoader> class_loader)
      : image_writer_(image_writer),
        class_loader_(class_loader),
        classes_to_prune_(),
        defined_class_count_(0u) { }

  bool operator()(ObjPtr<mirror::Class> klass) OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!image_writer_->KeepClass(klass.Ptr())) {
      classes_to_prune_.insert(klass.Ptr());
      if (klass->GetClassLoader() == class_loader_) {
        ++defined_class_count_;
      }
    }
    return true;
  }

  size_t Prune() REQUIRES_SHARED(Locks::mutator_lock_) {
    ClassTable* class_table =
        Runtime::Current()->GetClassLinker()->ClassTableForClassLoader(class_loader_);
    for (mirror::Class* klass : classes_to_prune_) {
      std::string storage;
      const char* descriptor = klass->GetDescriptor(&storage);
      bool result = class_table->Remove(descriptor);
      DCHECK(result);
      DCHECK(!class_table->Remove(descriptor)) << descriptor;
    }
    return defined_class_count_;
  }

 private:
  ImageWriter* const image_writer_;
  const ObjPtr<mirror::ClassLoader> class_loader_;
  std::unordered_set<mirror::Class*> classes_to_prune_;
  size_t defined_class_count_;
};

class ImageWriter::PruneClassLoaderClassesVisitor : public ClassLoaderVisitor {
 public:
  explicit PruneClassLoaderClassesVisitor(ImageWriter* image_writer)
      : image_writer_(image_writer), removed_class_count_(0) {}

  virtual void Visit(ObjPtr<mirror::ClassLoader> class_loader) OVERRIDE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    PruneClassesVisitor classes_visitor(image_writer_, class_loader);
    ClassTable* class_table =
        Runtime::Current()->GetClassLinker()->ClassTableForClassLoader(class_loader);
    class_table->Visit(classes_visitor);
    removed_class_count_ += classes_visitor.Prune();

    // Record app image class loader. The fake boot class loader should not get registered
    // and we should end up with only one class loader for an app and none for boot image.
    if (class_loader != nullptr && class_table != nullptr) {
      DCHECK(class_loader_ == nullptr);
      class_loader_ = class_loader;
    }
  }

  size_t GetRemovedClassCount() const {
    return removed_class_count_;
  }

  ObjPtr<mirror::ClassLoader> GetClassLoader() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return class_loader_;
  }

 private:
  ImageWriter* const image_writer_;
  size_t removed_class_count_;
  ObjPtr<mirror::ClassLoader> class_loader_;
};

void ImageWriter::VisitClassLoaders(ClassLoaderVisitor* visitor) {
  WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
  visitor->Visit(nullptr);  // Visit boot class loader.
  Runtime::Current()->GetClassLinker()->VisitClassLoaders(visitor);
}

void ImageWriter::PruneAndPreloadDexCache(ObjPtr<mirror::DexCache> dex_cache,
                                          ObjPtr<mirror::ClassLoader> class_loader) {
  // To ensure deterministic contents of the hash-based arrays, each slot shall contain
  // the candidate with the lowest index. As we're processing entries in increasing index
  // order, this means trying to look up the entry for the current index if the slot is
  // empty or if it contains a higher index.
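  // For example, if indices 3 and 1027 hash to the same slot, the slot must end up holding
  // the entry for index 3, regardless of the order in which the entries were resolved.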
1030 
1031   Runtime* runtime = Runtime::Current();
1032   ClassLinker* class_linker = runtime->GetClassLinker();
1033   const DexFile& dex_file = *dex_cache->GetDexFile();
1034   // Prune methods.
1035   mirror::MethodDexCacheType* resolved_methods = dex_cache->GetResolvedMethods();
1036   dex::TypeIndex last_class_idx;  // Initialized to invalid index.
1037   ObjPtr<mirror::Class> last_class = nullptr;
1038   for (size_t i = 0, num = dex_cache->GetDexFile()->NumMethodIds(); i != num; ++i) {
1039     uint32_t slot_idx = dex_cache->MethodSlotIndex(i);
1040     auto pair =
1041         mirror::DexCache::GetNativePairPtrSize(resolved_methods, slot_idx, target_ptr_size_);
1042     uint32_t stored_index = pair.index;
1043     ArtMethod* method = pair.object;
1044     if (method != nullptr && i > stored_index) {
1045       continue;  // Already checked.
1046     }
1047     // Check if the referenced class is in the image. Note that we want to check the referenced
1048     // class rather than the declaring class to preserve the semantics, i.e. using a MethodId
1049     // results in resolving the referenced class and that can for example throw OOME.
1050     const DexFile::MethodId& method_id = dex_file.GetMethodId(i);
1051     if (method_id.class_idx_ != last_class_idx) {
1052       last_class_idx = method_id.class_idx_;
1053       last_class = class_linker->LookupResolvedType(last_class_idx, dex_cache, class_loader);
1054       if (last_class != nullptr && !KeepClass(last_class)) {
1055         last_class = nullptr;
1056       }
1057     }
1058     if (method == nullptr || i < stored_index) {
1059       if (last_class != nullptr) {
1060         // Try to resolve the method with the class linker, which will insert
1061         // it into the dex cache if successful.
1062         method = class_linker->FindResolvedMethod(last_class, dex_cache, class_loader, i);
1063         // If the referenced class is in the image, the defining class must also be there.
1064         DCHECK(method == nullptr || KeepClass(method->GetDeclaringClass()));
1065         DCHECK(method == nullptr || dex_cache->GetResolvedMethod(i, target_ptr_size_) == method);
1066       }
1067     } else {
1068       DCHECK_EQ(i, stored_index);
1069       if (last_class == nullptr) {
1070         dex_cache->ClearResolvedMethod(stored_index, target_ptr_size_);
1071       }
1072     }
1073   }
1074   // Prune fields and make the contents of the field array deterministic.
1075   mirror::FieldDexCacheType* resolved_fields = dex_cache->GetResolvedFields();
1076   last_class_idx = dex::TypeIndex();  // Initialized to invalid index.
1077   last_class = nullptr;
1078   for (size_t i = 0, end = dex_file.NumFieldIds(); i < end; ++i) {
1079     uint32_t slot_idx = dex_cache->FieldSlotIndex(i);
1080     auto pair = mirror::DexCache::GetNativePairPtrSize(resolved_fields, slot_idx, target_ptr_size_);
1081     uint32_t stored_index = pair.index;
1082     ArtField* field = pair.object;
1083     if (field != nullptr && i > stored_index) {
1084       continue;  // Already checked.
1085     }
1086     // Check if the referenced class is in the image. Note that we want to check the referenced
1087     // class rather than the declaring class to preserve the semantics, i.e. using a FieldId
1088     // results in resolving the referenced class and that can for example throw OOME.
1089     const DexFile::FieldId& field_id = dex_file.GetFieldId(i);
1090     if (field_id.class_idx_ != last_class_idx) {
1091       last_class_idx = field_id.class_idx_;
1092       last_class = class_linker->LookupResolvedType(last_class_idx, dex_cache, class_loader);
1093       if (last_class != nullptr && !KeepClass(last_class)) {
1094         last_class = nullptr;
1095       }
1096     }
1097     if (field == nullptr || i < stored_index) {
1098       if (last_class != nullptr) {
1099         field = class_linker->FindResolvedFieldJLS(last_class, dex_cache, class_loader, i);
1100         // If the referenced class is in the image, the defining class must also be there.
1101         DCHECK(field == nullptr || KeepClass(field->GetDeclaringClass()));
1102         DCHECK(field == nullptr || dex_cache->GetResolvedField(i, target_ptr_size_) == field);
1103       }
1104     } else {
1105       DCHECK_EQ(i, stored_index);
1106       if (last_class == nullptr) {
1107         dex_cache->ClearResolvedField(stored_index, target_ptr_size_);
1108       }
1109     }
1110   }
1111   // Prune types and make the contents of the type array deterministic.
1112   // This is done after fields and methods as their lookup can touch the types array.
1113   for (size_t i = 0, end = dex_cache->GetDexFile()->NumTypeIds(); i < end; ++i) {
1114     dex::TypeIndex type_idx(i);
1115     uint32_t slot_idx = dex_cache->TypeSlotIndex(type_idx);
1116     mirror::TypeDexCachePair pair =
1117         dex_cache->GetResolvedTypes()[slot_idx].load(std::memory_order_relaxed);
1118     uint32_t stored_index = pair.index;
1119     ObjPtr<mirror::Class> klass = pair.object.Read();
1120     if (klass == nullptr || i < stored_index) {
1121       klass = class_linker->LookupResolvedType(type_idx, dex_cache, class_loader);
1122       if (klass != nullptr) {
1123         DCHECK_EQ(dex_cache->GetResolvedType(type_idx), klass);
1124         stored_index = i;  // For correct clearing below if not keeping the `klass`.
1125       }
1126     } else if (i == stored_index && !KeepClass(klass)) {
1127       dex_cache->ClearResolvedType(dex::TypeIndex(stored_index));
1128     }
1129   }
1130   // Strings do not need pruning, but the contents of the string array must be deterministic.
1131   for (size_t i = 0, end = dex_cache->GetDexFile()->NumStringIds(); i < end; ++i) {
1132     dex::StringIndex string_idx(i);
1133     uint32_t slot_idx = dex_cache->StringSlotIndex(string_idx);
1134     mirror::StringDexCachePair pair =
1135         dex_cache->GetStrings()[slot_idx].load(std::memory_order_relaxed);
1136     uint32_t stored_index = pair.index;
1137     ObjPtr<mirror::String> string = pair.object.Read();
1138     if (string == nullptr || i < stored_index) {
1139       string = class_linker->LookupString(string_idx, dex_cache);
1140       DCHECK(string == nullptr || dex_cache->GetResolvedString(string_idx) == string);
1141     }
1142   }
1143 }
1144 
PruneNonImageClasses()1145 void ImageWriter::PruneNonImageClasses() {
1146   Runtime* runtime = Runtime::Current();
1147   ClassLinker* class_linker = runtime->GetClassLinker();
1148   Thread* self = Thread::Current();
1149   ScopedAssertNoThreadSuspension sa(__FUNCTION__);
1150 
1151   // Prune uses-library dex caches. Only prune the uses-library dex caches since we want to make
1152   // sure the other ones don't get unloaded before the OatWriter runs.
1153   class_linker->VisitClassTables(
1154       [&](ClassTable* table) REQUIRES_SHARED(Locks::mutator_lock_) {
1155     table->RemoveStrongRoots(
1156         [&](GcRoot<mirror::Object> root) REQUIRES_SHARED(Locks::mutator_lock_) {
1157       ObjPtr<mirror::Object> obj = root.Read();
1158       if (obj->IsDexCache()) {
1159         // Return true if the dex file is not one of the ones in the map.
1160         return dex_file_oat_index_map_.find(obj->AsDexCache()->GetDexFile()) ==
1161             dex_file_oat_index_map_.end();
1162       }
1163       // Return false to avoid removing.
1164       return false;
1165     });
1166   });
1167 
1168   // Remove the undesired classes from the class roots.
1169   ObjPtr<mirror::ClassLoader> class_loader;
1170   {
1171     PruneClassLoaderClassesVisitor class_loader_visitor(this);
1172     VisitClassLoaders(&class_loader_visitor);
1173     VLOG(compiler) << "Pruned " << class_loader_visitor.GetRemovedClassCount() << " classes";
1174     class_loader = class_loader_visitor.GetClassLoader();
1175     DCHECK_EQ(class_loader != nullptr, compile_app_image_);
1176   }
1177 
1178   // Clear references to removed classes from the DexCaches.
1179   std::vector<ObjPtr<mirror::DexCache>> dex_caches;
1180   {
1181     ReaderMutexLock mu2(self, *Locks::dex_lock_);
1182     dex_caches.reserve(class_linker->GetDexCachesData().size());
1183     for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
1184       if (self->IsJWeakCleared(data.weak_root)) {
1185         continue;
1186       }
1187       dex_caches.push_back(self->DecodeJObject(data.weak_root)->AsDexCache());
1188     }
1189   }
1190   for (ObjPtr<mirror::DexCache> dex_cache : dex_caches) {
1191     // Pass the class loader associated with the DexCache. This can either be
1192     // the app's `class_loader` or `nullptr` if boot class loader.
1193     PruneAndPreloadDexCache(dex_cache, IsInBootImage(dex_cache.Ptr()) ? nullptr : class_loader);
1194   }
1195 
1196   // Drop the array class cache in the ClassLinker, as these are roots holding those classes live.
1197   class_linker->DropFindArrayClassCache();
1198 
1199   // Clear to save RAM.
1200   prune_class_memo_.clear();
1201 }
1202 
CheckNonImageClassesRemoved()1203 void ImageWriter::CheckNonImageClassesRemoved() {
1204   if (compiler_driver_.GetImageClasses() != nullptr) {
1205     auto visitor = [&](Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
1206       if (obj->IsClass() && !IsInBootImage(obj)) {
1207         Class* klass = obj->AsClass();
1208         if (!KeepClass(klass)) {
1209           DumpImageClasses();
1211           CHECK(KeepClass(klass))
1212               << Runtime::Current()->GetHeap()->GetVerification()->FirstPathFromRootSet(klass);
1213         }
1214       }
1215     };
1216     gc::Heap* heap = Runtime::Current()->GetHeap();
1217     heap->VisitObjects(visitor);
1218   }
1219 }
1220 
1221 void ImageWriter::DumpImageClasses() {
1222   auto image_classes = compiler_driver_.GetImageClasses();
1223   CHECK(image_classes != nullptr);
1224   for (const std::string& image_class : *image_classes) {
1225     LOG(INFO) << " " << image_class;
1226   }
1227 }
1228 
1229 mirror::String* ImageWriter::FindInternedString(mirror::String* string) {
1230   Thread* const self = Thread::Current();
1231   for (const ImageInfo& image_info : image_infos_) {
1232     ObjPtr<mirror::String> const found = image_info.intern_table_->LookupStrong(self, string);
1233     DCHECK(image_info.intern_table_->LookupWeak(self, string) == nullptr)
1234         << string->ToModifiedUtf8();
1235     if (found != nullptr) {
1236       return found.Ptr();
1237     }
1238   }
1239   if (compile_app_image_) {
1240     Runtime* const runtime = Runtime::Current();
1241     ObjPtr<mirror::String> found = runtime->GetInternTable()->LookupStrong(self, string);
1242     // If we found it in the runtime intern table it could either be in the boot image or interned
1243     // during app image compilation. If it was in the boot image return that, otherwise return null
1244     // since it belongs to another image space.
1245     if (found != nullptr && runtime->GetHeap()->ObjectIsInBootImageSpace(found.Ptr())) {
1246       return found.Ptr();
1247     }
1248     DCHECK(runtime->GetInternTable()->LookupWeak(self, string) == nullptr)
1249         << string->ToModifiedUtf8();
1250   }
1251   return nullptr;
1252 }
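// A sketch (illustrative only; `Lookup` is a hypothetical method) of the
// first-match-wins policy above: the per-image intern tables are consulted in
// order, and the runtime table is then used only to recognize boot-image
// strings, never strings owned by a different app image.
//
//   template <typename Table, typename Key>
//   mirror::String* SketchFirstMatch(const std::vector<Table*>& tables, const Key& key) {
//     for (Table* table : tables) {
//       mirror::String* found = table->Lookup(key);
//       if (found != nullptr) {
//         return found;  // The first table that knows the key wins.
//       }
//     }
//     return nullptr;  // Not interned anywhere this image may reference.
//   }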
1253 
1254 
1255 ObjectArray<Object>* ImageWriter::CreateImageRoots(size_t oat_index) const {
1256   Runtime* runtime = Runtime::Current();
1257   ClassLinker* class_linker = runtime->GetClassLinker();
1258   Thread* self = Thread::Current();
1259   StackHandleScope<3> hs(self);
1260   Handle<Class> object_array_class(hs.NewHandle(
1261       class_linker->FindSystemClass(self, "[Ljava/lang/Object;")));
1262 
1263   std::unordered_set<const DexFile*> image_dex_files;
1264   for (auto& pair : dex_file_oat_index_map_) {
1265     const DexFile* image_dex_file = pair.first;
1266     size_t image_oat_index = pair.second;
1267     if (oat_index == image_oat_index) {
1268       image_dex_files.insert(image_dex_file);
1269     }
1270   }
1271 
1272   // Build an Object[] of all the DexCaches used in the source_space_.
1273   // Since we can't hold the dex lock when allocating the dex_caches
1274   // ObjectArray, we lock the dex lock twice, first to get the number
1275   // of dex caches and then again to copy them. We check that the number
1276   // of dex caches does not change. (A sketch of this pattern follows the function.)
1277   size_t dex_cache_count = 0;
1278   {
1279     ReaderMutexLock mu(self, *Locks::dex_lock_);
1280     // Count number of dex caches not in the boot image.
1281     for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
1282       ObjPtr<mirror::DexCache> dex_cache =
1283           ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
1284       if (dex_cache == nullptr) {
1285         continue;
1286       }
1287       const DexFile* dex_file = dex_cache->GetDexFile();
1288       if (!IsInBootImage(dex_cache.Ptr())) {
1289         dex_cache_count += image_dex_files.find(dex_file) != image_dex_files.end() ? 1u : 0u;
1290       }
1291     }
1292   }
1293   Handle<ObjectArray<Object>> dex_caches(
1294       hs.NewHandle(ObjectArray<Object>::Alloc(self, object_array_class.Get(), dex_cache_count)));
1295   CHECK(dex_caches != nullptr) << "Failed to allocate a dex cache array.";
1296   {
1297     ReaderMutexLock mu(self, *Locks::dex_lock_);
1298     size_t non_image_dex_caches = 0;
1299     // Re-count number of non image dex caches.
1300     for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
1301       ObjPtr<mirror::DexCache> dex_cache =
1302           ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
1303       if (dex_cache == nullptr) {
1304         continue;
1305       }
1306       const DexFile* dex_file = dex_cache->GetDexFile();
1307       if (!IsInBootImage(dex_cache.Ptr())) {
1308         non_image_dex_caches += image_dex_files.find(dex_file) != image_dex_files.end() ? 1u : 0u;
1309       }
1310     }
1311     CHECK_EQ(dex_cache_count, non_image_dex_caches)
1312         << "The number of non-image dex caches changed.";
1313     size_t i = 0;
1314     for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
1315       ObjPtr<mirror::DexCache> dex_cache =
1316           ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
1317       if (dex_cache == nullptr) {
1318         continue;
1319       }
1320       const DexFile* dex_file = dex_cache->GetDexFile();
1321       if (!IsInBootImage(dex_cache.Ptr()) &&
1322           image_dex_files.find(dex_file) != image_dex_files.end()) {
1323         dex_caches->Set<false>(i, dex_cache.Ptr());
1324         ++i;
1325       }
1326     }
1327   }
1328 
1329   // Build an Object[] of the roots needed to restore the runtime.
1330   int32_t image_roots_size = ImageHeader::NumberOfImageRoots(compile_app_image_);
1331   auto image_roots(hs.NewHandle(
1332       ObjectArray<Object>::Alloc(self, object_array_class.Get(), image_roots_size)));
1333   image_roots->Set<false>(ImageHeader::kDexCaches, dex_caches.Get());
1334   image_roots->Set<false>(ImageHeader::kClassRoots, class_linker->GetClassRoots());
1335   // image_roots[ImageHeader::kClassLoader] will be set later for app image.
1336   static_assert(ImageHeader::kClassLoader + 1u == ImageHeader::kImageRootsMax,
1337                 "Class loader should be the last image root.");
1338   for (int32_t i = 0; i < ImageHeader::kImageRootsMax - 1; ++i) {
1339     CHECK(image_roots->Get(i) != nullptr);
1340   }
1341   return image_roots.Get();
1342 }
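// A minimal sketch (illustrative only; `Lock::ScopedLock` and the collection
// interface are hypothetical) of the count-then-copy pattern used above: the
// lock cannot be held across the allocation, so we count under the lock,
// allocate unlocked, then re-take the lock, verify the count, and copy.
namespace {
template <typename Collection, typename Lock>
void SketchCountThenCopy(const Collection& collection, Lock& lock, std::vector<const void*>* out) {
  size_t count;
  {
    typename Lock::ScopedLock scoped(lock);  // First critical section: count only.
    count = collection.size();
  }
  out->reserve(count);  // The allocation happens with the lock released.
  {
    typename Lock::ScopedLock scoped(lock);  // Second critical section: verify and copy.
    CHECK_EQ(count, collection.size());      // Nothing may have changed in between.
    for (const auto& entry : collection) {
      out->push_back(&entry);
    }
  }
}
}  // namespace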
1343 
1344 mirror::Object* ImageWriter::TryAssignBinSlot(WorkStack& work_stack,
1345                                               mirror::Object* obj,
1346                                               size_t oat_index) {
1347   if (obj == nullptr || IsInBootImage(obj)) {
1348     // Object is null or already in the image, there is no work to do.
1349     return obj;
1350   }
1351   if (!IsImageBinSlotAssigned(obj)) {
1352     // We want to intern all strings, but we also need to assign offsets for the source string.
1353     // Since the pruning phase has already happened, interning a string to one already in the
1354     // image still leaves us copying the now-unreachable source string.
1355     if (obj->IsString()) {
1356       // Need to check whether the string is already interned in another image info so that the
1357       // intern tables of two different images don't contain the same string.
1358       mirror::String* interned = FindInternedString(obj->AsString());
1359       if (interned == nullptr) {
1360         // Not in another image space, insert to our table.
1361         interned =
1362             GetImageInfo(oat_index).intern_table_->InternStrongImageString(obj->AsString()).Ptr();
1363         DCHECK_EQ(interned, obj);
1364       }
1365     } else if (obj->IsDexCache()) {
1366       oat_index = GetOatIndexForDexCache(obj->AsDexCache());
1367     } else if (obj->IsClass()) {
1368       // Visit and assign offsets for fields and field arrays.
1369       mirror::Class* as_klass = obj->AsClass();
1370       mirror::DexCache* dex_cache = as_klass->GetDexCache();
1371       DCHECK(!as_klass->IsErroneous()) << as_klass->GetStatus();
1372       if (compile_app_image_) {
1373         // Extra sanity, no boot loader classes should be left!
1374         CHECK(!IsBootClassLoaderClass(as_klass)) << as_klass->PrettyClass();
1375       }
1376       LengthPrefixedArray<ArtField>* fields[] = {
1377           as_klass->GetSFieldsPtr(), as_klass->GetIFieldsPtr(),
1378       };
1379       // Overwrite the oat index value since the class' dex cache is a more accurate indication
1380       // of where the class belongs.
1381       oat_index = GetOatIndexForDexCache(dex_cache);
1382       ImageInfo& image_info = GetImageInfo(oat_index);
1383       if (!compile_app_image_) {
1384         // Note: Avoid locking to prevent lock order violations from root visiting;
1385         // image_info.class_table_ is only accessed from the image writer.
1386         image_info.class_table_->InsertWithoutLocks(as_klass);
1387       }
1388       for (LengthPrefixedArray<ArtField>* cur_fields : fields) {
1389         // Total array length including header.
1390         if (cur_fields != nullptr) {
1391           const size_t header_size = LengthPrefixedArray<ArtField>::ComputeSize(0);
1392           // Forward the entire array at once.
1393           auto it = native_object_relocations_.find(cur_fields);
1394           CHECK(it == native_object_relocations_.end()) << "Field array " << cur_fields
1395                                                   << " already forwarded";
1396           size_t offset = image_info.GetBinSlotSize(Bin::kArtField);
1397           DCHECK(!IsInBootImage(cur_fields));
1398           native_object_relocations_.emplace(
1399               cur_fields,
1400               NativeObjectRelocation {
1401                   oat_index, offset, NativeObjectRelocationType::kArtFieldArray
1402               });
1403           offset += header_size;
1404           // Forward individual fields so that we can quickly find where they belong.
1405           for (size_t i = 0, count = cur_fields->size(); i < count; ++i) {
1406             // Need to forward arrays separately from fields.
1407             ArtField* field = &cur_fields->At(i);
1408             auto it2 = native_object_relocations_.find(field);
1409             CHECK(it2 == native_object_relocations_.end()) << "Field at index=" << i
1410                 << " already assigned " << field->PrettyField() << " static=" << field->IsStatic();
1411             DCHECK(!IsInBootImage(field));
1412             native_object_relocations_.emplace(
1413                 field,
1414                 NativeObjectRelocation { oat_index,
1415                                          offset,
1416                                          NativeObjectRelocationType::kArtField });
1417             offset += sizeof(ArtField);
1418           }
1419           image_info.IncrementBinSlotSize(
1420               Bin::kArtField, header_size + cur_fields->size() * sizeof(ArtField));
1421           DCHECK_EQ(offset, image_info.GetBinSlotSize(Bin::kArtField));
1422         }
1423       }
1424       // Visit and assign offsets for methods.
1425       size_t num_methods = as_klass->NumMethods();
1426       if (num_methods != 0) {
1427         bool any_dirty = false;
1428         for (auto& m : as_klass->GetMethods(target_ptr_size_)) {
1429           if (WillMethodBeDirty(&m)) {
1430             any_dirty = true;
1431             break;
1432           }
1433         }
1434         NativeObjectRelocationType type = any_dirty
1435             ? NativeObjectRelocationType::kArtMethodDirty
1436             : NativeObjectRelocationType::kArtMethodClean;
1437         Bin bin_type = BinTypeForNativeRelocationType(type);
1438         // Forward the entire array at once, but header first.
1439         const size_t method_alignment = ArtMethod::Alignment(target_ptr_size_);
1440         const size_t method_size = ArtMethod::Size(target_ptr_size_);
1441         const size_t header_size = LengthPrefixedArray<ArtMethod>::ComputeSize(0,
1442                                                                                method_size,
1443                                                                                method_alignment);
1444         LengthPrefixedArray<ArtMethod>* array = as_klass->GetMethodsPtr();
1445         auto it = native_object_relocations_.find(array);
1446         CHECK(it == native_object_relocations_.end())
1447             << "Method array " << array << " already forwarded";
1448         size_t offset = image_info.GetBinSlotSize(bin_type);
1449         DCHECK(!IsInBootImage(array));
1450         native_object_relocations_.emplace(array,
1451             NativeObjectRelocation {
1452                 oat_index,
1453                 offset,
1454                 any_dirty ? NativeObjectRelocationType::kArtMethodArrayDirty
1455                           : NativeObjectRelocationType::kArtMethodArrayClean });
1456         image_info.IncrementBinSlotSize(bin_type, header_size);
1457         for (auto& m : as_klass->GetMethods(target_ptr_size_)) {
1458           AssignMethodOffset(&m, type, oat_index);
1459         }
1460         (any_dirty ? dirty_methods_ : clean_methods_) += num_methods;
1461       }
1462       // Assign offsets for all runtime methods in the IMT since these may hold conflict tables
1463       // live.
1464       if (as_klass->ShouldHaveImt()) {
1465         ImTable* imt = as_klass->GetImt(target_ptr_size_);
1466         if (TryAssignImTableOffset(imt, oat_index)) {
1467           // Since ImTables can be shared, only do this the first time so that we don't double
1468           // count IMT method fixups.
1469           for (size_t i = 0; i < ImTable::kSize; ++i) {
1470             ArtMethod* imt_method = imt->Get(i, target_ptr_size_);
1471             DCHECK(imt_method != nullptr);
1472             if (imt_method->IsRuntimeMethod() &&
1473                 !IsInBootImage(imt_method) &&
1474                 !NativeRelocationAssigned(imt_method)) {
1475               AssignMethodOffset(imt_method, NativeObjectRelocationType::kRuntimeMethod, oat_index);
1476             }
1477           }
1478         }
1479       }
1480     } else if (obj->IsClassLoader()) {
1481       // Register the class loader if it has a class table.
1482       // The fake boot class loader should not get registered and we should end up with only one
1483       // class loader.
1484       mirror::ClassLoader* class_loader = obj->AsClassLoader();
1485       if (class_loader->GetClassTable() != nullptr) {
1486         DCHECK(compile_app_image_);
1487         DCHECK(class_loaders_.empty());
1488         class_loaders_.insert(class_loader);
1489         ImageInfo& image_info = GetImageInfo(oat_index);
1490         // Note: Avoid locking to prevent lock order violations from root visiting;
1491         // image_info.class_table_ table is only accessed from the image writer
1492         // and class_loader->GetClassTable() is iterated but not modified.
1493         image_info.class_table_->CopyWithoutLocks(*class_loader->GetClassTable());
1494       }
1495     }
1496     AssignImageBinSlot(obj, oat_index);
1497     work_stack.emplace(obj, oat_index);
1498   }
1499   if (obj->IsString()) {
1500     // Always return the interned string if one exists.
1501     mirror::String* interned = FindInternedString(obj->AsString());
1502     if (interned != nullptr) {
1503       return interned;
1504     }
1505   }
1506   return obj;
1507 }
1508 
1509 bool ImageWriter::NativeRelocationAssigned(void* ptr) const {
1510   return native_object_relocations_.find(ptr) != native_object_relocations_.end();
1511 }
1512 
1513 bool ImageWriter::TryAssignImTableOffset(ImTable* imt, size_t oat_index) {
1514   // No offset, or already assigned.
1515   if (imt == nullptr || IsInBootImage(imt) || NativeRelocationAssigned(imt)) {
1516     return false;
1517   }
1518   // Reserve space for the ImTable at the current end of its bin.
1519   ImageInfo& image_info = GetImageInfo(oat_index);
1520   const size_t size = ImTable::SizeInBytes(target_ptr_size_);
1521   native_object_relocations_.emplace(
1522       imt,
1523       NativeObjectRelocation {
1524           oat_index,
1525           image_info.GetBinSlotSize(Bin::kImTable),
1526           NativeObjectRelocationType::kIMTable});
1527   image_info.IncrementBinSlotSize(Bin::kImTable, size);
1528   return true;
1529 }
1530 
1531 void ImageWriter::TryAssignConflictTableOffset(ImtConflictTable* table, size_t oat_index) {
1532   // No offset, or already assigned.
1533   if (table == nullptr || NativeRelocationAssigned(table)) {
1534     return;
1535   }
1536   CHECK(!IsInBootImage(table));
1537   // If the method is a conflict method we also want to assign the conflict table offset.
1538   ImageInfo& image_info = GetImageInfo(oat_index);
1539   const size_t size = table->ComputeSize(target_ptr_size_);
1540   native_object_relocations_.emplace(
1541       table,
1542       NativeObjectRelocation {
1543           oat_index,
1544           image_info.GetBinSlotSize(Bin::kIMTConflictTable),
1545           NativeObjectRelocationType::kIMTConflictTable});
1546   image_info.IncrementBinSlotSize(Bin::kIMTConflictTable, size);
1547 }
1548 
1549 void ImageWriter::AssignMethodOffset(ArtMethod* method,
1550                                      NativeObjectRelocationType type,
1551                                      size_t oat_index) {
1552   DCHECK(!IsInBootImage(method));
1553   CHECK(!NativeRelocationAssigned(method)) << "Method " << method << " already assigned "
1554       << ArtMethod::PrettyMethod(method);
1555   if (method->IsRuntimeMethod()) {
1556     TryAssignConflictTableOffset(method->GetImtConflictTable(target_ptr_size_), oat_index);
1557   }
1558   ImageInfo& image_info = GetImageInfo(oat_index);
1559   Bin bin_type = BinTypeForNativeRelocationType(type);
1560   size_t offset = image_info.GetBinSlotSize(bin_type);
1561   native_object_relocations_.emplace(method, NativeObjectRelocation { oat_index, offset, type });
1562   image_info.IncrementBinSlotSize(bin_type, ArtMethod::Size(target_ptr_size_));
1563 }
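// The assignment helpers above share one reservation pattern, sketched here
// (illustrative only; hypothetical names): an object's offset within its bin
// is the bin's current size, and the bin then grows by the object's size so
// the next reservation lands directly behind it. The per-bin base offsets are
// added later in CalculateNewObjectOffsets().
namespace {
struct SketchBin {
  size_t size = 0;  // Current end of the bin == offset handed to the next object.
};
size_t SketchReserve(SketchBin* bin, size_t object_size) {
  const size_t offset = bin->size;  // Place the object at the current end...
  bin->size += object_size;         // ...and grow the bin past it.
  return offset;
}
}  // namespace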
1564 
1565 void ImageWriter::UnbinObjectsIntoOffset(mirror::Object* obj) {
1566   DCHECK(!IsInBootImage(obj));
1567   CHECK(obj != nullptr);
1568 
1569   // We know the bin slot, and the total bin sizes for all objects by now,
1570   // so calculate the object's final image offset.
1571 
1572   DCHECK(IsImageBinSlotAssigned(obj));
1573   BinSlot bin_slot = GetImageBinSlot(obj);
1574   // Change the lockword from a bin slot into an offset
1575   AssignImageOffset(obj, bin_slot);
1576 }
1577 
1578 class ImageWriter::VisitReferencesVisitor {
1579  public:
1580   VisitReferencesVisitor(ImageWriter* image_writer, WorkStack* work_stack, size_t oat_index)
1581       : image_writer_(image_writer), work_stack_(work_stack), oat_index_(oat_index) {}
1582 
1583   // Fix up separately since we also need to fix up method entrypoints.
1584   ALWAYS_INLINE void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
1585       REQUIRES_SHARED(Locks::mutator_lock_) {
1586     if (!root->IsNull()) {
1587       VisitRoot(root);
1588     }
1589   }
1590 
1591   ALWAYS_INLINE void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
1592       REQUIRES_SHARED(Locks::mutator_lock_) {
1593     root->Assign(VisitReference(root->AsMirrorPtr()));
1594   }
1595 
1596   ALWAYS_INLINE void operator() (ObjPtr<mirror::Object> obj,
1597                                  MemberOffset offset,
1598                                  bool is_static ATTRIBUTE_UNUSED) const
1599       REQUIRES_SHARED(Locks::mutator_lock_) {
1600     mirror::Object* ref =
1601         obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset);
1602     obj->SetFieldObject</*kTransactionActive*/false>(offset, VisitReference(ref));
1603   }
1604 
1605   ALWAYS_INLINE void operator() (ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
1606                                  ObjPtr<mirror::Reference> ref) const
1607       REQUIRES_SHARED(Locks::mutator_lock_) {
1608     operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false);
1609   }
1610 
1611  private:
1612   mirror::Object* VisitReference(mirror::Object* ref) const REQUIRES_SHARED(Locks::mutator_lock_) {
1613     return image_writer_->TryAssignBinSlot(*work_stack_, ref, oat_index_);
1614   }
1615 
1616   ImageWriter* const image_writer_;
1617   WorkStack* const work_stack_;
1618   const size_t oat_index_;
1619 };
1620 
1621 class ImageWriter::GetRootsVisitor : public RootVisitor  {
1622  public:
1623   explicit GetRootsVisitor(std::vector<mirror::Object*>* roots) : roots_(roots) {}
1624 
1625   void VisitRoots(mirror::Object*** roots,
1626                   size_t count,
1627                   const RootInfo& info ATTRIBUTE_UNUSED) OVERRIDE
1628       REQUIRES_SHARED(Locks::mutator_lock_) {
1629     for (size_t i = 0; i < count; ++i) {
1630       roots_->push_back(*roots[i]);
1631     }
1632   }
1633 
1634   void VisitRoots(mirror::CompressedReference<mirror::Object>** roots,
1635                   size_t count,
1636                   const RootInfo& info ATTRIBUTE_UNUSED) OVERRIDE
1637       REQUIRES_SHARED(Locks::mutator_lock_) {
1638     for (size_t i = 0; i < count; ++i) {
1639       roots_->push_back(roots[i]->AsMirrorPtr());
1640     }
1641   }
1642 
1643  private:
1644   std::vector<mirror::Object*>* const roots_;
1645 };
1646 
1647 void ImageWriter::ProcessWorkStack(WorkStack* work_stack) {
1648   while (!work_stack->empty()) {
1649     std::pair<mirror::Object*, size_t> pair(work_stack->top());
1650     work_stack->pop();
1651     VisitReferencesVisitor visitor(this, work_stack, /*oat_index*/ pair.second);
1652     // Walk references and assign bin slots for them.
1653     pair.first->VisitReferences</*kVisitNativeRoots*/true, kVerifyNone, kWithoutReadBarrier>(
1654         visitor,
1655         visitor);
1656   }
1657 }
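// A minimal sketch (illustrative only; hypothetical node type) of the worklist
// traversal above: an object is pushed when it is first assigned a bin slot,
// and popping continues until no newly discovered references remain. This is
// an iterative depth-first walk over the object graph.
namespace {
struct SketchNode {
  std::vector<SketchNode*> refs;
  bool assigned = false;
};
void SketchTraverse(SketchNode* root) {
  std::vector<SketchNode*> work;  // LIFO worklist, like WorkStack above.
  root->assigned = true;
  work.push_back(root);
  while (!work.empty()) {
    SketchNode* node = work.back();
    work.pop_back();
    for (SketchNode* ref : node->refs) {
      if (!ref->assigned) {     // Assign a slot the first time a node is seen...
        ref->assigned = true;
        work.push_back(ref);    // ...and queue it so its own references get visited.
      }
    }
  }
}
}  // namespace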
1658 
1659 void ImageWriter::CalculateNewObjectOffsets() {
1660   Thread* const self = Thread::Current();
1661   VariableSizedHandleScope handles(self);
1662   std::vector<Handle<ObjectArray<Object>>> image_roots;
1663   for (size_t i = 0, size = oat_filenames_.size(); i != size; ++i) {
1664     image_roots.push_back(handles.NewHandle(CreateImageRoots(i)));
1665   }
1666 
1667   Runtime* const runtime = Runtime::Current();
1668   gc::Heap* const heap = runtime->GetHeap();
1669 
1670   // Leave space for the header, but do not write it yet; we need to
1671   // know where image_roots is going to end up.
1672   image_objects_offset_begin_ = RoundUp(sizeof(ImageHeader), kObjectAlignment);  // 64-bit-alignment
1673 
1674   const size_t method_alignment = ArtMethod::Alignment(target_ptr_size_);
1675   // Write the image runtime methods.
1676   image_methods_[ImageHeader::kResolutionMethod] = runtime->GetResolutionMethod();
1677   image_methods_[ImageHeader::kImtConflictMethod] = runtime->GetImtConflictMethod();
1678   image_methods_[ImageHeader::kImtUnimplementedMethod] = runtime->GetImtUnimplementedMethod();
1679   image_methods_[ImageHeader::kSaveAllCalleeSavesMethod] =
1680       runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveAllCalleeSaves);
1681   image_methods_[ImageHeader::kSaveRefsOnlyMethod] =
1682       runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsOnly);
1683   image_methods_[ImageHeader::kSaveRefsAndArgsMethod] =
1684       runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs);
1685   image_methods_[ImageHeader::kSaveEverythingMethod] =
1686       runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveEverything);
1687   image_methods_[ImageHeader::kSaveEverythingMethodForClinit] =
1688       runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveEverythingForClinit);
1689   image_methods_[ImageHeader::kSaveEverythingMethodForSuspendCheck] =
1690       runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveEverythingForSuspendCheck);
1691   // Visit image methods first to have the main runtime methods in the first image.
1692   for (auto* m : image_methods_) {
1693     CHECK(m != nullptr);
1694     CHECK(m->IsRuntimeMethod());
1695     DCHECK_EQ(compile_app_image_, IsInBootImage(m)) << "Trampolines should be in boot image";
1696     if (!IsInBootImage(m)) {
1697       AssignMethodOffset(m, NativeObjectRelocationType::kRuntimeMethod, GetDefaultOatIndex());
1698     }
1699   }
1700 
1701   // Deflate monitors before we visit roots since deflating acquires the monitor lock. Acquiring
1702   // this lock while holding other locks may cause lock order violations.
1703   {
1704     auto deflate_monitor = [](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
1705       Monitor::Deflate(Thread::Current(), obj);
1706     };
1707     heap->VisitObjects(deflate_monitor);
1708   }
1709 
1710   // Work list of <object, oat_index> for objects. Everything on the stack must already be
1711   // assigned a bin slot.
1712   WorkStack work_stack;
1713 
1714   // Special case interned strings to put them in the image they are likely to be resolved from.
1715   for (const DexFile* dex_file : compiler_driver_.GetDexFilesForOatFile()) {
1716     auto it = dex_file_oat_index_map_.find(dex_file);
1717     DCHECK(it != dex_file_oat_index_map_.end()) << dex_file->GetLocation();
1718     const size_t oat_index = it->second;
1719     InternTable* const intern_table = runtime->GetInternTable();
1720     for (size_t i = 0, count = dex_file->NumStringIds(); i < count; ++i) {
1721       uint32_t utf16_length;
1722       const char* utf8_data = dex_file->StringDataAndUtf16LengthByIdx(dex::StringIndex(i),
1723                                                                       &utf16_length);
1724       mirror::String* string = intern_table->LookupStrong(self, utf16_length, utf8_data).Ptr();
1725       TryAssignBinSlot(work_stack, string, oat_index);
1726     }
1727   }
1728 
1729   // Get the GC roots and then visit them separately to avoid lock violations since the root visitor
1730   // visits roots while holding various locks.
1731   {
1732     std::vector<mirror::Object*> roots;
1733     GetRootsVisitor root_visitor(&roots);
1734     runtime->VisitRoots(&root_visitor);
1735     for (mirror::Object* obj : roots) {
1736       TryAssignBinSlot(work_stack, obj, GetDefaultOatIndex());
1737     }
1738   }
1739   ProcessWorkStack(&work_stack);
1740 
1741   // For app images, there may be objects that are only held live by the boot image. One
1742   // example is finalizer references. Forward these objects so that EnsureBinSlotAssignedCallback
1743   // does not fail any checks. TODO: We should probably avoid copying these objects.
1744   if (compile_app_image_) {
1745     for (gc::space::ImageSpace* space : heap->GetBootImageSpaces()) {
1746       DCHECK(space->IsImageSpace());
1747       gc::accounting::ContinuousSpaceBitmap* live_bitmap = space->GetLiveBitmap();
1748       live_bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(space->Begin()),
1749                                     reinterpret_cast<uintptr_t>(space->Limit()),
1750                                     [this, &work_stack](mirror::Object* obj)
1751           REQUIRES_SHARED(Locks::mutator_lock_) {
1752         VisitReferencesVisitor visitor(this, &work_stack, GetDefaultOatIndex());
1753         // Visit all references and try to assign bin slots for them (calls TryAssignBinSlot).
1754         obj->VisitReferences</*kVisitNativeRoots*/true, kVerifyNone, kWithoutReadBarrier>(
1755             visitor,
1756             visitor);
1757       });
1758     }
1759     // Process the work stack in case anything was added by TryAssignBinSlot.
1760     ProcessWorkStack(&work_stack);
1761 
1762     // Store the class loader in the class roots.
1763     CHECK_EQ(class_loaders_.size(), 1u);
1764     CHECK_EQ(image_roots.size(), 1u);
1765     CHECK(*class_loaders_.begin() != nullptr);
1766     image_roots[0]->Set<false>(ImageHeader::kClassLoader, *class_loaders_.begin());
1767   }
1768 
1769   // Verify that all objects have assigned image bin slots.
1770   {
1771     auto ensure_bin_slots_assigned = [&](mirror::Object* obj)
1772         REQUIRES_SHARED(Locks::mutator_lock_) {
1773       if (!Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(obj)) {
1774         CHECK(IsImageBinSlotAssigned(obj)) << mirror::Object::PrettyTypeOf(obj) << " " << obj;
1775       }
1776     };
1777     heap->VisitObjects(ensure_bin_slots_assigned);
1778   }
1779 
1780   // Calculate size of the dex cache arrays slot and prepare offsets.
1781   PrepareDexCacheArraySlots();
1782 
1783   // Calculate the sizes of the intern tables, class tables, and fixup tables.
1784   for (ImageInfo& image_info : image_infos_) {
1785     // Calculate how big the intern table will be after being serialized.
1786     InternTable* const intern_table = image_info.intern_table_.get();
1787     CHECK_EQ(intern_table->WeakSize(), 0u) << " should have strong interned all the strings";
1788     if (intern_table->StrongSize() != 0u) {
1789       image_info.intern_table_bytes_ = intern_table->WriteToMemory(nullptr);
1790     }
1791 
1792     // Calculate the size of the class table.
1793     ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
1794     DCHECK_EQ(image_info.class_table_->NumReferencedZygoteClasses(), 0u);
1795     if (image_info.class_table_->NumReferencedNonZygoteClasses() != 0u) {
1796       image_info.class_table_bytes_ += image_info.class_table_->WriteToMemory(nullptr);
1797     }
1798   }
1799 
1800   // Calculate bin slot offsets.
1801   for (ImageInfo& image_info : image_infos_) {
1802     size_t bin_offset = image_objects_offset_begin_;
1803     for (size_t i = 0; i != kNumberOfBins; ++i) {
1804       switch (static_cast<Bin>(i)) {
1805         case Bin::kArtMethodClean:
1806         case Bin::kArtMethodDirty: {
1807           bin_offset = RoundUp(bin_offset, method_alignment);
1808           break;
1809         }
1810         case Bin::kDexCacheArray:
1811           bin_offset = RoundUp(bin_offset, DexCacheArraysLayout::Alignment(target_ptr_size_));
1812           break;
1813         case Bin::kImTable:
1814         case Bin::kIMTConflictTable: {
1815           bin_offset = RoundUp(bin_offset, static_cast<size_t>(target_ptr_size_));
1816           break;
1817         }
1818         default: {
1819           // Normal alignment.
1820         }
1821       }
1822       image_info.bin_slot_offsets_[i] = bin_offset;
1823       bin_offset += image_info.bin_slot_sizes_[i];
1824     }
1825     // NOTE: There may be additional padding between the bin slots and the intern table.
1826     DCHECK_EQ(image_info.image_end_,
1827               image_info.GetBinSizeSum(Bin::kMirrorCount) + image_objects_offset_begin_);
1828   }
1829 
1830   // Calculate image offsets.
1831   size_t image_offset = 0;
1832   for (ImageInfo& image_info : image_infos_) {
1833     image_info.image_begin_ = global_image_begin_ + image_offset;
1834     image_info.image_offset_ = image_offset;
1835     ImageSection unused_sections[ImageHeader::kSectionCount];
1836     image_info.image_size_ =
1837         RoundUp(image_info.CreateImageSections(unused_sections, compile_app_image_), kPageSize);
1838     // There should be no gaps until the next image.
1839     image_offset += image_info.image_size_;
1840   }
1841 
1842   // Transform each object's bin slot into an offset which will be used to do the final copy.
1843   {
1844     auto unbin_objects_into_offset = [&](mirror::Object* obj)
1845         REQUIRES_SHARED(Locks::mutator_lock_) {
1846       if (!IsInBootImage(obj)) {
1847         UnbinObjectsIntoOffset(obj);
1848       }
1849     };
1850     heap->VisitObjects(unbin_objects_into_offset);
1851   }
1852 
1853   size_t i = 0;
1854   for (ImageInfo& image_info : image_infos_) {
1855     image_info.image_roots_address_ = PointerToLowMemUInt32(GetImageAddress(image_roots[i].Get()));
1856     i++;
1857   }
1858 
1859   // Update the native relocations by adding their bin sums.
1860   for (auto& pair : native_object_relocations_) {
1861     NativeObjectRelocation& relocation = pair.second;
1862     Bin bin_type = BinTypeForNativeRelocationType(relocation.type);
1863     ImageInfo& image_info = GetImageInfo(relocation.oat_index);
1864     relocation.offset += image_info.GetBinSlotOffset(bin_type);
1865   }
1866 }
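// A minimal sketch (illustrative only; hypothetical names) of the bin layout
// step above: each bin starts where the previous one ended, rounded up to the
// bin's alignment, so a per-bin base offset plus an object's offset within the
// bin yields its final position in the image.
namespace {
size_t SketchAlignUp(size_t value, size_t alignment) {
  return (value + alignment - 1u) & ~(alignment - 1u);  // Power-of-two alignment.
}
void SketchLayoutBins(const std::vector<size_t>& bin_sizes,
                      const std::vector<size_t>& bin_alignments,
                      size_t start,
                      std::vector<size_t>* bin_offsets) {
  size_t pos = start;
  for (size_t i = 0; i < bin_sizes.size(); ++i) {
    pos = SketchAlignUp(pos, bin_alignments[i]);  // Respect the bin's alignment.
    bin_offsets->push_back(pos);                  // The bin begins here...
    pos += bin_sizes[i];                          // ...and the next bin follows its end.
  }
}
}  // namespace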
1867 
1868 size_t ImageWriter::ImageInfo::CreateImageSections(ImageSection* out_sections,
1869                                                    bool app_image) const {
1870   DCHECK(out_sections != nullptr);
1871 
1872   // Do not round up any sections here that are represented by the bins since it will break
1873   // offsets.
1874 
1875   // Objects section
1876   ImageSection* objects_section = &out_sections[ImageHeader::kSectionObjects];
1877   *objects_section = ImageSection(0u, image_end_);
1878 
1879   // Add field section.
1880   ImageSection* field_section = &out_sections[ImageHeader::kSectionArtFields];
1881   *field_section = ImageSection(GetBinSlotOffset(Bin::kArtField), GetBinSlotSize(Bin::kArtField));
1882 
1883   // Add method section.
1884   ImageSection* methods_section = &out_sections[ImageHeader::kSectionArtMethods];
1885   *methods_section = ImageSection(
1886       GetBinSlotOffset(Bin::kArtMethodClean),
1887       GetBinSlotSize(Bin::kArtMethodClean) + GetBinSlotSize(Bin::kArtMethodDirty));
1888 
1889   // IMT section.
1890   ImageSection* imt_section = &out_sections[ImageHeader::kSectionImTables];
1891   *imt_section = ImageSection(GetBinSlotOffset(Bin::kImTable), GetBinSlotSize(Bin::kImTable));
1892 
1893   // Conflict tables section.
1894   ImageSection* imt_conflict_tables_section = &out_sections[ImageHeader::kSectionIMTConflictTables];
1895   *imt_conflict_tables_section = ImageSection(GetBinSlotOffset(Bin::kIMTConflictTable),
1896                                               GetBinSlotSize(Bin::kIMTConflictTable));
1897 
1898   // Runtime methods section.
1899   ImageSection* runtime_methods_section = &out_sections[ImageHeader::kSectionRuntimeMethods];
1900   *runtime_methods_section = ImageSection(GetBinSlotOffset(Bin::kRuntimeMethod),
1901                                           GetBinSlotSize(Bin::kRuntimeMethod));
1902 
1903   // Add dex cache arrays section.
1904   ImageSection* dex_cache_arrays_section = &out_sections[ImageHeader::kSectionDexCacheArrays];
1905   *dex_cache_arrays_section = ImageSection(GetBinSlotOffset(Bin::kDexCacheArray),
1906                                            GetBinSlotSize(Bin::kDexCacheArray));
1907   // For boot image, round up to the page boundary to separate the interned strings and
1908   // class table from the modifiable data. We shall mprotect() these pages read-only when
1909   // we load the boot image. This is more than sufficient for the string table alignment,
1910   // namely sizeof(uint64_t). See HashSet::WriteToMemory.
1911   static_assert(IsAligned<sizeof(uint64_t)>(kPageSize), "String table alignment check.");
1912   size_t cur_pos =
1913       RoundUp(dex_cache_arrays_section->End(), app_image ? sizeof(uint64_t) : kPageSize);
1914   // Calculate the size of the interned strings.
1915   ImageSection* interned_strings_section = &out_sections[ImageHeader::kSectionInternedStrings];
1916   *interned_strings_section = ImageSection(cur_pos, intern_table_bytes_);
1917   cur_pos = interned_strings_section->End();
1918   // Round up to the alignment the class table expects. See HashSet::WriteToMemory.
1919   cur_pos = RoundUp(cur_pos, sizeof(uint64_t));
1920   // Calculate the size of the class table section.
1921   ImageSection* class_table_section = &out_sections[ImageHeader::kSectionClassTable];
1922   *class_table_section = ImageSection(cur_pos, class_table_bytes_);
1923   cur_pos = class_table_section->End();
1924   // Image end goes right before the start of the image bitmap.
1925   return cur_pos;
1926 }
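// Worked example (illustrative numbers) for the rounding above: if the dex
// cache arrays section ends at offset 0x2345, a boot image starts the interned
// strings section at RoundUp(0x2345, kPageSize) == 0x3000 (4 KiB pages), so
// the read-only tables begin on their own page and can be mprotect()ed
// separately; an app image only needs RoundUp(0x2345, sizeof(uint64_t)) == 0x2348.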
1927 
1928 void ImageWriter::CreateHeader(size_t oat_index) {
1929   ImageInfo& image_info = GetImageInfo(oat_index);
1930   const uint8_t* oat_file_begin = image_info.oat_file_begin_;
1931   const uint8_t* oat_file_end = oat_file_begin + image_info.oat_loaded_size_;
1932   const uint8_t* oat_data_end = image_info.oat_data_begin_ + image_info.oat_size_;
1933 
1934   // Create the image sections.
1935   ImageSection sections[ImageHeader::kSectionCount];
1936   const size_t image_end = image_info.CreateImageSections(sections, compile_app_image_);
1937 
1938   // Finally bitmap section.
1939   const size_t bitmap_bytes = image_info.image_bitmap_->Size();
1940   auto* bitmap_section = &sections[ImageHeader::kSectionImageBitmap];
1941   *bitmap_section = ImageSection(RoundUp(image_end, kPageSize), RoundUp(bitmap_bytes, kPageSize));
1942   if (VLOG_IS_ON(compiler)) {
1943     LOG(INFO) << "Creating header for " << oat_filenames_[oat_index];
1944     size_t idx = 0;
1945     for (const ImageSection& section : sections) {
1946       LOG(INFO) << static_cast<ImageHeader::ImageSections>(idx) << " " << section;
1947       ++idx;
1948     }
1949     LOG(INFO) << "Methods: clean=" << clean_methods_ << " dirty=" << dirty_methods_;
1950     LOG(INFO) << "Image roots address=" << std::hex << image_info.image_roots_address_ << std::dec;
1951     LOG(INFO) << "Image begin=" << std::hex << reinterpret_cast<uintptr_t>(global_image_begin_)
1952               << " Image offset=" << image_info.image_offset_ << std::dec;
1953     LOG(INFO) << "Oat file begin=" << std::hex << reinterpret_cast<uintptr_t>(oat_file_begin)
1954               << " Oat data begin=" << reinterpret_cast<uintptr_t>(image_info.oat_data_begin_)
1955               << " Oat data end=" << reinterpret_cast<uintptr_t>(oat_data_end)
1956               << " Oat file end=" << reinterpret_cast<uintptr_t>(oat_file_end);
1957   }
1958   // Store boot image info for app image so that we can relocate.
1959   uint32_t boot_image_begin = 0;
1960   uint32_t boot_image_end = 0;
1961   uint32_t boot_oat_begin = 0;
1962   uint32_t boot_oat_end = 0;
1963   gc::Heap* const heap = Runtime::Current()->GetHeap();
1964   heap->GetBootImagesSize(&boot_image_begin, &boot_image_end, &boot_oat_begin, &boot_oat_end);
1965 
1966   // Create the header, leave 0 for data size since we will fill this in as we are writing the
1967   // image.
1968   new (image_info.image_->Begin()) ImageHeader(PointerToLowMemUInt32(image_info.image_begin_),
1969                                                image_end,
1970                                                sections,
1971                                                image_info.image_roots_address_,
1972                                                image_info.oat_checksum_,
1973                                                PointerToLowMemUInt32(oat_file_begin),
1974                                                PointerToLowMemUInt32(image_info.oat_data_begin_),
1975                                                PointerToLowMemUInt32(oat_data_end),
1976                                                PointerToLowMemUInt32(oat_file_end),
1977                                                boot_image_begin,
1978                                                boot_image_end - boot_image_begin,
1979                                                boot_oat_begin,
1980                                                boot_oat_end - boot_oat_begin,
1981                                                static_cast<uint32_t>(target_ptr_size_),
1982                                                compile_pic_,
1983                                                /*is_pic*/compile_app_image_,
1984                                                image_storage_mode_,
1985                                                /*data_size*/0u);
1986 }
1987 
1988 ArtMethod* ImageWriter::GetImageMethodAddress(ArtMethod* method) {
1989   auto it = native_object_relocations_.find(method);
1990   CHECK(it != native_object_relocations_.end()) << ArtMethod::PrettyMethod(method) << " @ "
1991                                                 << method;
1992   size_t oat_index = GetOatIndex(method->GetDexCache());
1993   ImageInfo& image_info = GetImageInfo(oat_index);
1994   CHECK_GE(it->second.offset, image_info.image_end_) << "ArtMethods should be after Objects";
1995   return reinterpret_cast<ArtMethod*>(image_info.image_begin_ + it->second.offset);
1996 }
1997 
1998 class ImageWriter::FixupRootVisitor : public RootVisitor {
1999  public:
2000   explicit FixupRootVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {
2001   }
2002 
2003   void VisitRoots(mirror::Object*** roots ATTRIBUTE_UNUSED,
2004                   size_t count ATTRIBUTE_UNUSED,
2005                   const RootInfo& info ATTRIBUTE_UNUSED)
2006       OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
2007     LOG(FATAL) << "Unsupported";
2008   }
2009 
2010   void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
2011                   const RootInfo& info ATTRIBUTE_UNUSED)
2012       OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
2013     for (size_t i = 0; i < count; ++i) {
2014       image_writer_->CopyReference(roots[i], roots[i]->AsMirrorPtr());
2015     }
2016   }
2017 
2018  private:
2019   ImageWriter* const image_writer_;
2020 };
2021 
2022 void ImageWriter::CopyAndFixupImTable(ImTable* orig, ImTable* copy) {
2023   for (size_t i = 0; i < ImTable::kSize; ++i) {
2024     ArtMethod* method = orig->Get(i, target_ptr_size_);
2025     void** address = reinterpret_cast<void**>(copy->AddressOfElement(i, target_ptr_size_));
2026     CopyAndFixupPointer(address, method);
2027     DCHECK_EQ(copy->Get(i, target_ptr_size_), NativeLocationInImage(method));
2028   }
2029 }
2030 
2031 void ImageWriter::CopyAndFixupImtConflictTable(ImtConflictTable* orig, ImtConflictTable* copy) {
2032   const size_t count = orig->NumEntries(target_ptr_size_);
2033   for (size_t i = 0; i < count; ++i) {
2034     ArtMethod* interface_method = orig->GetInterfaceMethod(i, target_ptr_size_);
2035     ArtMethod* implementation_method = orig->GetImplementationMethod(i, target_ptr_size_);
2036     CopyAndFixupPointer(copy->AddressOfInterfaceMethod(i, target_ptr_size_), interface_method);
2037     CopyAndFixupPointer(copy->AddressOfImplementationMethod(i, target_ptr_size_),
2038                         implementation_method);
2039     DCHECK_EQ(copy->GetInterfaceMethod(i, target_ptr_size_),
2040               NativeLocationInImage(interface_method));
2041     DCHECK_EQ(copy->GetImplementationMethod(i, target_ptr_size_),
2042               NativeLocationInImage(implementation_method));
2043   }
2044 }
2045 
2046 void ImageWriter::CopyAndFixupNativeData(size_t oat_index) {
2047   const ImageInfo& image_info = GetImageInfo(oat_index);
2048   // Copy ArtFields and methods to their locations and update the array for convenience.
2049   for (auto& pair : native_object_relocations_) {
2050     NativeObjectRelocation& relocation = pair.second;
2051     // Only work with fields and methods that are in the current oat file.
2052     if (relocation.oat_index != oat_index) {
2053       continue;
2054     }
2055     auto* dest = image_info.image_->Begin() + relocation.offset;
2056     DCHECK_GE(dest, image_info.image_->Begin() + image_info.image_end_);
2057     DCHECK(!IsInBootImage(pair.first));
2058     switch (relocation.type) {
2059       case NativeObjectRelocationType::kArtField: {
2060         memcpy(dest, pair.first, sizeof(ArtField));
2061         CopyReference(
2062             reinterpret_cast<ArtField*>(dest)->GetDeclaringClassAddressWithoutBarrier(),
2063             reinterpret_cast<ArtField*>(pair.first)->GetDeclaringClass().Ptr());
2064         break;
2065       }
2066       case NativeObjectRelocationType::kRuntimeMethod:
2067       case NativeObjectRelocationType::kArtMethodClean:
2068       case NativeObjectRelocationType::kArtMethodDirty: {
2069         CopyAndFixupMethod(reinterpret_cast<ArtMethod*>(pair.first),
2070                            reinterpret_cast<ArtMethod*>(dest),
2071                            image_info);
2072         break;
2073       }
2074       // For arrays, copy just the header since the elements will get copied by their corresponding
2075       // relocations.
2076       case NativeObjectRelocationType::kArtFieldArray: {
2077         memcpy(dest, pair.first, LengthPrefixedArray<ArtField>::ComputeSize(0));
2078         break;
2079       }
2080       case NativeObjectRelocationType::kArtMethodArrayClean:
2081       case NativeObjectRelocationType::kArtMethodArrayDirty: {
2082         size_t size = ArtMethod::Size(target_ptr_size_);
2083         size_t alignment = ArtMethod::Alignment(target_ptr_size_);
2084         memcpy(dest, pair.first, LengthPrefixedArray<ArtMethod>::ComputeSize(0, size, alignment));
2085         // Clear padding to avoid non-deterministic data in the image (and placate valgrind).
2086         reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(dest)->ClearPadding(size, alignment);
2087         break;
2088       }
2089       case NativeObjectRelocationType::kDexCacheArray:
2090         // Nothing to copy here, everything is done in FixupDexCache().
2091         break;
2092       case NativeObjectRelocationType::kIMTable: {
2093         ImTable* orig_imt = reinterpret_cast<ImTable*>(pair.first);
2094         ImTable* dest_imt = reinterpret_cast<ImTable*>(dest);
2095         CopyAndFixupImTable(orig_imt, dest_imt);
2096         break;
2097       }
2098       case NativeObjectRelocationType::kIMTConflictTable: {
2099         auto* orig_table = reinterpret_cast<ImtConflictTable*>(pair.first);
2100         CopyAndFixupImtConflictTable(
2101             orig_table,
2102             new(dest)ImtConflictTable(orig_table->NumEntries(target_ptr_size_), target_ptr_size_));
2103         break;
2104       }
2105     }
2106   }
2107   // Fixup the image method roots.
2108   auto* image_header = reinterpret_cast<ImageHeader*>(image_info.image_->Begin());
2109   for (size_t i = 0; i < ImageHeader::kImageMethodsCount; ++i) {
2110     ArtMethod* method = image_methods_[i];
2111     CHECK(method != nullptr);
2112     if (!IsInBootImage(method)) {
2113       method = NativeLocationInImage(method);
2114     }
2115     image_header->SetImageMethod(static_cast<ImageHeader::ImageMethod>(i), method);
2116   }
2117   FixupRootVisitor root_visitor(this);
2118 
2119   // Write the intern table into the image.
2120   if (image_info.intern_table_bytes_ > 0) {
2121     const ImageSection& intern_table_section = image_header->GetInternedStringsSection();
2122     InternTable* const intern_table = image_info.intern_table_.get();
2123     uint8_t* const intern_table_memory_ptr =
2124         image_info.image_->Begin() + intern_table_section.Offset();
2125     const size_t intern_table_bytes = intern_table->WriteToMemory(intern_table_memory_ptr);
2126     CHECK_EQ(intern_table_bytes, image_info.intern_table_bytes_);
2127     // Fixup the pointers in the newly written intern table to contain image addresses.
2128     InternTable temp_intern_table;
2129     // Note that we require that AddTableFromMemory does not make an internal copy of the
2130     // elements so that VisitRoots() updates the memory directly rather than a copy.
2131     // This also relies on visit roots not doing any verification which could fail after we update
2132     // the roots to be the image addresses.
2133     temp_intern_table.AddTableFromMemory(intern_table_memory_ptr);
2134     CHECK_EQ(temp_intern_table.Size(), intern_table->Size());
2135     temp_intern_table.VisitRoots(&root_visitor, kVisitRootFlagAllRoots);
2136   }
2137   // Write the class table(s) into the image. class_table_bytes_ may be 0 if there are multiple
2138   // class loaders. Writing multiple class tables into the image is currently unsupported.
2139   if (image_info.class_table_bytes_ > 0u) {
2140     const ImageSection& class_table_section = image_header->GetClassTableSection();
2141     uint8_t* const class_table_memory_ptr =
2142         image_info.image_->Begin() + class_table_section.Offset();
2143     ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
2144 
2145     ClassTable* table = image_info.class_table_.get();
2146     CHECK(table != nullptr);
2147     const size_t class_table_bytes = table->WriteToMemory(class_table_memory_ptr);
2148     CHECK_EQ(class_table_bytes, image_info.class_table_bytes_);
2149     // Fixup the pointers in the newly written class table to contain image addresses. See
2150     // above comment for intern tables.
2151     ClassTable temp_class_table;
2152     temp_class_table.ReadFromMemory(class_table_memory_ptr);
2153     CHECK_EQ(temp_class_table.NumReferencedZygoteClasses(),
2154              table->NumReferencedNonZygoteClasses() + table->NumReferencedZygoteClasses());
2155     UnbufferedRootVisitor visitor(&root_visitor, RootInfo(kRootUnknown));
2156     temp_class_table.VisitRoots(visitor);
2157   }
2158 }
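// A minimal sketch (illustrative only) of the "serialize, then patch in place"
// step above: a table is first written into the image buffer, then every
// stored reference is rewritten to the address it will have once the image is
// mapped. The real code drives the rewrite through root visitors rather than
// a raw rebase like this one.
namespace {
void SketchRebase(std::vector<uintptr_t>* slots, uintptr_t old_base, uintptr_t new_base) {
  for (uintptr_t& slot : *slots) {
    if (slot != 0u) {
      slot = slot - old_base + new_base;  // Rewrite to the future mapped address.
    }
  }
}
}  // namespace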
2159 
2160 void ImageWriter::CopyAndFixupObjects() {
2161   auto visitor = [&](Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
2162     DCHECK(obj != nullptr);
2163     CopyAndFixupObject(obj);
2164   };
2165   Runtime::Current()->GetHeap()->VisitObjects(visitor);
2166   // Fix up the objects that previously had hash codes.
2167   for (const auto& hash_pair : saved_hashcode_map_) {
2168     Object* obj = hash_pair.first;
2169     DCHECK_EQ(obj->GetLockWord<kVerifyNone>(false).ReadBarrierState(), 0U);
2170     obj->SetLockWord<kVerifyNone>(LockWord::FromHashCode(hash_pair.second, 0U), false);
2171   }
2172   saved_hashcode_map_.clear();
2173 }
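// Note on the restore loop above: saved_hashcode_map_ was filled while bin
// slots were being assigned, when an object's identity hash had to be evicted
// from its lock word so the word could temporarily carry forwarding state.
// The copy receives the hash in CopyAndFixupObject(); the source object's
// original lock word is restored in the loop above.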
2174 
2175 void ImageWriter::FixupPointerArray(mirror::Object* dst,
2176                                     mirror::PointerArray* arr,
2177                                     mirror::Class* klass,
2178                                     Bin array_type) {
2179   CHECK(klass->IsArrayClass());
2180   CHECK(arr->IsIntArray() || arr->IsLongArray()) << klass->PrettyClass() << " " << arr;
2181   // Fixup int and long pointers for the ArtMethod or ArtField arrays.
2182   const size_t num_elements = arr->GetLength();
2183   dst->SetClass(GetImageAddress(arr->GetClass()));
2184   auto* dest_array = down_cast<mirror::PointerArray*>(dst);
2185   for (size_t i = 0, count = num_elements; i < count; ++i) {
2186     void* elem = arr->GetElementPtrSize<void*>(i, target_ptr_size_);
2187     if (kIsDebugBuild && elem != nullptr && !IsInBootImage(elem)) {
2188       auto it = native_object_relocations_.find(elem);
2189       if (UNLIKELY(it == native_object_relocations_.end())) {
2190         if (array_type != Bin::kArtField) {  // Note: `it` is end() and must not be dereferenced.
2191           auto* method = reinterpret_cast<ArtMethod*>(elem);
2192           LOG(FATAL) << "No relocation entry for ArtMethod " << method->PrettyMethod() << " @ "
2193                      << method << " idx=" << i << "/" << num_elements << " with declaring class "
2194                      << Class::PrettyClass(method->GetDeclaringClass());
2195         } else {
2196           CHECK_EQ(array_type, Bin::kArtField);
2197           auto* field = reinterpret_cast<ArtField*>(elem);
2198           LOG(FATAL) << "No relocation entry for ArtField " << field->PrettyField() << " @ "
2199               << field << " idx=" << i << "/" << num_elements << " with declaring class "
2200               << Class::PrettyClass(field->GetDeclaringClass());
2201         }
2202         UNREACHABLE();
2203       }
2204     }
2205     CopyAndFixupPointer(dest_array->ElementAddress(i, target_ptr_size_), elem);
2206   }
2207 }
2208 
2209 void ImageWriter::CopyAndFixupObject(Object* obj) {
2210   if (IsInBootImage(obj)) {
2211     return;
2212   }
2213   size_t offset = GetImageOffset(obj);
2214   size_t oat_index = GetOatIndex(obj);
2215   ImageInfo& image_info = GetImageInfo(oat_index);
2216   auto* dst = reinterpret_cast<Object*>(image_info.image_->Begin() + offset);
2217   DCHECK_LT(offset, image_info.image_end_);
2218   const auto* src = reinterpret_cast<const uint8_t*>(obj);
2219 
2220   image_info.image_bitmap_->Set(dst);  // Mark the obj as live.
2221 
2222   const size_t n = obj->SizeOf();
2223   DCHECK_LE(offset + n, image_info.image_->Size());
2224   memcpy(dst, src, n);
2225 
2226   // Write in the hash code of objects that have inflated monitors or a hash code in their
2227   // lock word.
2228   const auto it = saved_hashcode_map_.find(obj);
2229   dst->SetLockWord(it != saved_hashcode_map_.end() ?
2230       LockWord::FromHashCode(it->second, 0u) : LockWord::Default(), false);
2231   if (kUseBakerReadBarrier && gc::collector::ConcurrentCopying::kGrayDirtyImmuneObjects) {
2232     // Treat all of the objects in the image as marked to avoid unnecessary dirty pages. This is
2233     // safe since we mark all of the objects that may reference non-immune objects as gray.
2234     CHECK(dst->AtomicSetMarkBit(0, 1));
2235   }
2236   FixupObject(obj, dst);
2237 }
2238 
2239 // Rewrite all the references in the copied object to point to their image address equivalent
2240 class ImageWriter::FixupVisitor {
2241  public:
2242   FixupVisitor(ImageWriter* image_writer, Object* copy) : image_writer_(image_writer), copy_(copy) {
2243   }
2244 
2245   // Ignore class roots since we don't have a way to map them to the destination. These are handled
2246   // with other logic.
2247   void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
2248       const {}
2249   void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}
2250 
2251 
2252   void operator()(ObjPtr<Object> obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
2253       REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
2254     ObjPtr<Object> ref = obj->GetFieldObject<Object, kVerifyNone>(offset);
2255     // Copy the reference and record the fixup if necessary.
2256     image_writer_->CopyReference(
2257         copy_->GetFieldObjectReferenceAddr<kVerifyNone>(offset),
2258         ref.Ptr());
2259   }
2260 
2261   // java.lang.ref.Reference visitor.
2262   void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
2263                   ObjPtr<mirror::Reference> ref) const
2264       REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
2265     operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false);
2266   }
2267 
2268  protected:
2269   ImageWriter* const image_writer_;
2270   mirror::Object* const copy_;
2271 };
2272 
2273 class ImageWriter::FixupClassVisitor FINAL : public FixupVisitor {
2274  public:
FixupClassVisitor(ImageWriter * image_writer,Object * copy)2275   FixupClassVisitor(ImageWriter* image_writer, Object* copy) : FixupVisitor(image_writer, copy) {
2276   }
2277 
operator ()(ObjPtr<Object> obj,MemberOffset offset,bool is_static ATTRIBUTE_UNUSED) const2278   void operator()(ObjPtr<Object> obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
2279       REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
2280     DCHECK(obj->IsClass());
2281     FixupVisitor::operator()(obj, offset, /*is_static*/false);
2282   }
2283 
operator ()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,ObjPtr<mirror::Reference> ref ATTRIBUTE_UNUSED) const2284   void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
2285                   ObjPtr<mirror::Reference> ref ATTRIBUTE_UNUSED) const
2286       REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
2287     LOG(FATAL) << "Reference not expected here.";
2288   }
2289 };
2290 
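// Return the offset, within its destination image, that has been assigned to the given native
// object (ArtMethod, ArtField, IMT table, etc.) by the relocation table.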
uintptr_t ImageWriter::NativeOffsetInImage(void* obj) {
  DCHECK(obj != nullptr);
  DCHECK(!IsInBootImage(obj));
  auto it = native_object_relocations_.find(obj);
  CHECK(it != native_object_relocations_.end()) << obj << " spaces "
      << Runtime::Current()->GetHeap()->DumpSpaces();
  const NativeObjectRelocation& relocation = it->second;
  return relocation.offset;
}

template <typename T>
std::string PrettyPrint(T* ptr) REQUIRES_SHARED(Locks::mutator_lock_) {
  std::ostringstream oss;
  oss << ptr;
  return oss.str();
}

template <>
std::string PrettyPrint(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
  return ArtMethod::PrettyMethod(method);
}

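// Return the address that the given native object will have in the loaded image, i.e. relative
// to the target image_begin_. Null pointers and boot image objects are returned unchanged.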
template <typename T>
T* ImageWriter::NativeLocationInImage(T* obj) {
  if (obj == nullptr || IsInBootImage(obj)) {
    return obj;
  } else {
    auto it = native_object_relocations_.find(obj);
    CHECK(it != native_object_relocations_.end()) << obj << " " << PrettyPrint(obj)
        << " spaces " << Runtime::Current()->GetHeap()->DumpSpaces();
    const NativeObjectRelocation& relocation = it->second;
    ImageInfo& image_info = GetImageInfo(relocation.oat_index);
    return reinterpret_cast<T*>(image_info.image_begin_ + relocation.offset);
  }
}

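// Unlike NativeLocationInImage, return the address of the object's copy inside the writer's own
// image buffer, so the copy can be written before the image is mapped at its target address.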
template <typename T>
T* ImageWriter::NativeCopyLocation(T* obj, mirror::DexCache* dex_cache) {
  if (obj == nullptr || IsInBootImage(obj)) {
    return obj;
  } else {
    size_t oat_index = GetOatIndexForDexCache(dex_cache);
    ImageInfo& image_info = GetImageInfo(oat_index);
    return reinterpret_cast<T*>(image_info.image_->Begin() + NativeOffsetInImage(obj));
  }
}

class ImageWriter::NativeLocationVisitor {
 public:
  explicit NativeLocationVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {}

  template <typename T>
  T* operator()(T* ptr, void** dest_addr = nullptr) const REQUIRES_SHARED(Locks::mutator_lock_) {
    if (dest_addr != nullptr) {
      image_writer_->CopyAndFixupPointer(dest_addr, ptr);
    }
    return image_writer_->NativeLocationInImage(ptr);
  }

 private:
  ImageWriter* const image_writer_;
};

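// Fix up a copied Class: relocate its native pointers (methods, fields, tables), visit its
// references, and scrub state that would make the image non-deterministic or stale on reload.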
void ImageWriter::FixupClass(mirror::Class* orig, mirror::Class* copy) {
  orig->FixupNativePointers(copy, target_ptr_size_, NativeLocationVisitor(this));
  FixupClassVisitor visitor(this, copy);
  ObjPtr<mirror::Object>(orig)->VisitReferences(visitor, visitor);

  if (kBitstringSubtypeCheckEnabled && compile_app_image_) {
    // When we call SubtypeCheck::EnsureInitialized, it assigns new bitstring
    // values to the parent of that class.
    //
    // Every time this happens, the parent class has to mutate to increment
    // the "Next" value.
    //
    // If any of these parents are in the boot image, the changes [in the parents]
    // would be lost when the app image is reloaded.
    //
    // To prevent newly loaded classes (not in the app image) from being reassigned
    // the same bitstring value as an existing app image class, uninitialize
    // all the classes in the app image.
    //
    // On startup, the class linker will then re-initialize all the app
    // image bitstrings. See also ClassLinker::AddImageSpace.
    MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
    // Lock every time to prevent a dcheck failure when we suspend with the lock held.
    SubtypeCheck<mirror::Class*>::ForceUninitialize(copy);
  }

  // Remove the clinitThreadId. This is required for image determinism.
  copy->SetClinitThreadId(static_cast<pid_t>(0));
}

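// Dispatch fixup of a copied object based on its type: pointer arrays, classes, reflective
// Method/Constructor objects, dex caches and class loaders all need extra native fixups beyond
// plain reference rewriting.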
void ImageWriter::FixupObject(Object* orig, Object* copy) {
  DCHECK(orig != nullptr);
  DCHECK(copy != nullptr);
  if (kUseBakerReadBarrier) {
    orig->AssertReadBarrierState();
  }
  auto* klass = orig->GetClass();
  if (klass->IsIntArrayClass() || klass->IsLongArrayClass()) {
    // Is this a native pointer array?
    auto it = pointer_arrays_.find(down_cast<mirror::PointerArray*>(orig));
    if (it != pointer_arrays_.end()) {
      // Should only need to fix up each pointer array exactly once.
      FixupPointerArray(copy, down_cast<mirror::PointerArray*>(orig), klass, it->second);
      pointer_arrays_.erase(it);
      return;
    }
  }
  if (orig->IsClass()) {
    FixupClass(orig->AsClass<kVerifyNone>(), down_cast<mirror::Class*>(copy));
  } else {
    if (klass == mirror::Method::StaticClass() || klass == mirror::Constructor::StaticClass()) {
      // Need to update the ArtMethod.
      auto* dest = down_cast<mirror::Executable*>(copy);
      auto* src = down_cast<mirror::Executable*>(orig);
      ArtMethod* src_method = src->GetArtMethod();
      dest->SetArtMethod(GetImageMethodAddress(src_method));
    } else if (!klass->IsArrayClass()) {
      ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
      if (klass == class_linker->GetClassRoot(ClassLinker::kJavaLangDexCache)) {
        FixupDexCache(down_cast<mirror::DexCache*>(orig), down_cast<mirror::DexCache*>(copy));
      } else if (klass->IsClassLoaderClass()) {
        mirror::ClassLoader* copy_loader = down_cast<mirror::ClassLoader*>(copy);
        // If src is a ClassLoader, set the class table to null so that it gets recreated by the
        // ClassLoader.
        copy_loader->SetClassTable(nullptr);
        // Also set the allocator to null to be safe. The allocator is created when we create the
        // class table. We also never expect to unload things in the image since they are held
        // live as roots.
        copy_loader->SetAllocator(nullptr);
      }
    }
    FixupVisitor visitor(this, copy);
    orig->VisitReferences(visitor, visitor);
  }
}

class ImageWriter::ImageAddressVisitorForDexCacheArray {
 public:
  explicit ImageAddressVisitorForDexCacheArray(ImageWriter* image_writer)
      : image_writer_(image_writer) {}

  template <typename T>
  T* operator()(T* ptr) const REQUIRES_SHARED(Locks::mutator_lock_) {
    return image_writer_->GetImageAddress(ptr);
  }

 private:
  ImageWriter* const image_writer_;
};

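// Fix up a copied DexCache: point each native array field at its image location and relocate
// the entries inside the writer's copy of each array.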
void ImageWriter::FixupDexCache(mirror::DexCache* orig_dex_cache,
                                mirror::DexCache* copy_dex_cache) {
  ImageAddressVisitorForDexCacheArray fixup_visitor(this);
  // Though the DexCache array fields are usually treated as native pointers, we set the full
  // 64-bit values here, clearing the top 32 bits for 32-bit targets. The zero-extension is
  // done by casting to the unsigned type uintptr_t before casting to int64_t, i.e.
  //     static_cast<int64_t>(reinterpret_cast<uintptr_t>(image_begin_ + offset)).
  mirror::StringDexCacheType* orig_strings = orig_dex_cache->GetStrings();
  if (orig_strings != nullptr) {
    copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::StringsOffset(),
                                               NativeLocationInImage(orig_strings),
                                               PointerSize::k64);
    orig_dex_cache->FixupStrings(NativeCopyLocation(orig_strings, orig_dex_cache), fixup_visitor);
  }
  mirror::TypeDexCacheType* orig_types = orig_dex_cache->GetResolvedTypes();
  if (orig_types != nullptr) {
    copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedTypesOffset(),
                                               NativeLocationInImage(orig_types),
                                               PointerSize::k64);
    orig_dex_cache->FixupResolvedTypes(NativeCopyLocation(orig_types, orig_dex_cache),
                                       fixup_visitor);
  }
  mirror::MethodDexCacheType* orig_methods = orig_dex_cache->GetResolvedMethods();
  if (orig_methods != nullptr) {
    copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedMethodsOffset(),
                                               NativeLocationInImage(orig_methods),
                                               PointerSize::k64);
    mirror::MethodDexCacheType* copy_methods = NativeCopyLocation(orig_methods, orig_dex_cache);
    for (size_t i = 0, num = orig_dex_cache->NumResolvedMethods(); i != num; ++i) {
      mirror::MethodDexCachePair orig_pair =
          mirror::DexCache::GetNativePairPtrSize(orig_methods, i, target_ptr_size_);
      // NativeLocationInImage also handles runtime methods since these have relocation info.
      mirror::MethodDexCachePair copy_pair(NativeLocationInImage(orig_pair.object),
                                           orig_pair.index);
      mirror::DexCache::SetNativePairPtrSize(copy_methods, i, copy_pair, target_ptr_size_);
    }
  }
  mirror::FieldDexCacheType* orig_fields = orig_dex_cache->GetResolvedFields();
  if (orig_fields != nullptr) {
    copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedFieldsOffset(),
                                               NativeLocationInImage(orig_fields),
                                               PointerSize::k64);
    mirror::FieldDexCacheType* copy_fields = NativeCopyLocation(orig_fields, orig_dex_cache);
    for (size_t i = 0, num = orig_dex_cache->NumResolvedFields(); i != num; ++i) {
      mirror::FieldDexCachePair orig =
          mirror::DexCache::GetNativePairPtrSize(orig_fields, i, target_ptr_size_);
      mirror::FieldDexCachePair copy = orig;
      copy.object = NativeLocationInImage(orig.object);
      mirror::DexCache::SetNativePairPtrSize(copy_fields, i, copy, target_ptr_size_);
    }
  }
  mirror::MethodTypeDexCacheType* orig_method_types = orig_dex_cache->GetResolvedMethodTypes();
  if (orig_method_types != nullptr) {
    copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedMethodTypesOffset(),
                                               NativeLocationInImage(orig_method_types),
                                               PointerSize::k64);
    orig_dex_cache->FixupResolvedMethodTypes(NativeCopyLocation(orig_method_types, orig_dex_cache),
                                             fixup_visitor);
  }
  GcRoot<mirror::CallSite>* orig_call_sites = orig_dex_cache->GetResolvedCallSites();
  if (orig_call_sites != nullptr) {
    copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedCallSitesOffset(),
                                               NativeLocationInImage(orig_call_sites),
                                               PointerSize::k64);
    orig_dex_cache->FixupResolvedCallSites(NativeCopyLocation(orig_call_sites, orig_dex_cache),
                                           fixup_visitor);
  }

  // Remove the DexFile pointers. They will be fixed up when the runtime loads the oat file.
  // Leaving compiler pointers in here would make the output non-deterministic.
  copy_dex_cache->SetDexFile(nullptr);
}

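// Return the address of the given stub. For app images, the stubs of the primary boot image are
// reused; for boot images, the stub offsets recorded from the primary oat header are resolved.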
const uint8_t* ImageWriter::GetOatAddress(StubType type) const {
  DCHECK_LE(type, StubType::kLast);
  // If we are compiling an app image, we need to use the stubs of the boot image.
  if (compile_app_image_) {
    // Use the current image pointers.
    const std::vector<gc::space::ImageSpace*>& image_spaces =
        Runtime::Current()->GetHeap()->GetBootImageSpaces();
    DCHECK(!image_spaces.empty());
    const OatFile* oat_file = image_spaces[0]->GetOatFile();
    CHECK(oat_file != nullptr);
    const OatHeader& header = oat_file->GetOatHeader();
    switch (type) {
      // TODO: We could maybe clean this up if we stored them in an array in the oat header.
      case StubType::kQuickGenericJNITrampoline:
        return static_cast<const uint8_t*>(header.GetQuickGenericJniTrampoline());
      case StubType::kInterpreterToInterpreterBridge:
        return static_cast<const uint8_t*>(header.GetInterpreterToInterpreterBridge());
      case StubType::kInterpreterToCompiledCodeBridge:
        return static_cast<const uint8_t*>(header.GetInterpreterToCompiledCodeBridge());
      case StubType::kJNIDlsymLookup:
        return static_cast<const uint8_t*>(header.GetJniDlsymLookup());
      case StubType::kQuickIMTConflictTrampoline:
        return static_cast<const uint8_t*>(header.GetQuickImtConflictTrampoline());
      case StubType::kQuickResolutionTrampoline:
        return static_cast<const uint8_t*>(header.GetQuickResolutionTrampoline());
      case StubType::kQuickToInterpreterBridge:
        return static_cast<const uint8_t*>(header.GetQuickToInterpreterBridge());
      default:
        UNREACHABLE();
    }
  }
  const ImageInfo& primary_image_info = GetImageInfo(0);
  return GetOatAddressForOffset(primary_image_info.GetStubOffset(type), primary_image_info);
}

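// Choose the quick entrypoint for a method in the image: its compiled code if present and
// directly usable, otherwise the generic JNI stub, the interpreter bridge, or the resolution
// trampoline, depending on whether the method is native/static and its class is initialized.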
const uint8_t* ImageWriter::GetQuickCode(ArtMethod* method,
                                         const ImageInfo& image_info,
                                         bool* quick_is_interpreted) {
  DCHECK(!method->IsResolutionMethod()) << method->PrettyMethod();
  DCHECK_NE(method, Runtime::Current()->GetImtConflictMethod()) << method->PrettyMethod();
  DCHECK(!method->IsImtUnimplementedMethod()) << method->PrettyMethod();
  DCHECK(method->IsInvokable()) << method->PrettyMethod();
  DCHECK(!IsInBootImage(method)) << method->PrettyMethod();

  // Use original code if it exists. Otherwise, set the code pointer to the resolution
  // trampoline.

  // Quick entrypoint:
  const void* quick_oat_entry_point =
      method->GetEntryPointFromQuickCompiledCodePtrSize(target_ptr_size_);
  const uint8_t* quick_code;

  if (UNLIKELY(IsInBootImage(method->GetDeclaringClass()))) {
    DCHECK(method->IsCopied());
    // The code is not in the oat file corresponding to this image (e.g. default methods), so
    // the entry point is already an absolute pointer rather than an oat file offset.
    quick_code = reinterpret_cast<const uint8_t*>(quick_oat_entry_point);
  } else {
    uint32_t quick_oat_code_offset = PointerToLowMemUInt32(quick_oat_entry_point);
    quick_code = GetOatAddressForOffset(quick_oat_code_offset, image_info);
  }

  *quick_is_interpreted = false;
  if (quick_code != nullptr && (!method->IsStatic() || method->IsConstructor() ||
      method->GetDeclaringClass()->IsInitialized())) {
    // We have code for a non-static or initialized method, just use the code.
  } else if (quick_code == nullptr && method->IsNative() &&
      (!method->IsStatic() || method->GetDeclaringClass()->IsInitialized())) {
    // Non-static or initialized native method missing compiled code, use generic JNI version.
    quick_code = GetOatAddress(StubType::kQuickGenericJNITrampoline);
  } else if (quick_code == nullptr && !method->IsNative()) {
    // We don't have code at all for a non-native method, use the interpreter.
    quick_code = GetOatAddress(StubType::kQuickToInterpreterBridge);
    *quick_is_interpreted = true;
  } else {
    CHECK(!method->GetDeclaringClass()->IsInitialized());
    // We have code for a static method, but need to go through the resolution stub for class
    // initialization.
    quick_code = GetOatAddress(StubType::kQuickResolutionTrampoline);
  }
  if (!IsInBootOatFile(quick_code)) {
    // DCHECK_GE(quick_code, oat_data_begin_);
  }
  return quick_code;
}

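// Copy |orig| into the image and patch its declaring class reference, its entrypoints and, for
// runtime methods, its IMT conflict table to their image locations.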
void ImageWriter::CopyAndFixupMethod(ArtMethod* orig,
                                     ArtMethod* copy,
                                     const ImageInfo& image_info) {
  if (orig->IsAbstract()) {
    // Ignore the single-implementation info for abstract methods.
    // Do this on orig instead of copy, otherwise there is a crash because methods
    // are copied before classes.
    // TODO: handle fixup of single-implementation method for abstract method.
    orig->SetHasSingleImplementation(false);
    orig->SetSingleImplementation(
        nullptr, Runtime::Current()->GetClassLinker()->GetImagePointerSize());
  }

  memcpy(copy, orig, ArtMethod::Size(target_ptr_size_));

  CopyReference(copy->GetDeclaringClassAddressWithoutBarrier(), orig->GetDeclaringClassUnchecked());

  // OatWriter replaces the code_ with an offset value. Here we re-adjust to a pointer relative
  // to oat_begin_.

  // The resolution method has a special trampoline to call.
  Runtime* runtime = Runtime::Current();
  if (orig->IsRuntimeMethod()) {
    ImtConflictTable* orig_table = orig->GetImtConflictTable(target_ptr_size_);
    if (orig_table != nullptr) {
      // Special IMT conflict method, normal IMT conflict method or unimplemented IMT method.
      copy->SetEntryPointFromQuickCompiledCodePtrSize(
          GetOatAddress(StubType::kQuickIMTConflictTrampoline), target_ptr_size_);
      copy->SetImtConflictTable(NativeLocationInImage(orig_table), target_ptr_size_);
    } else if (UNLIKELY(orig == runtime->GetResolutionMethod())) {
      copy->SetEntryPointFromQuickCompiledCodePtrSize(
          GetOatAddress(StubType::kQuickResolutionTrampoline), target_ptr_size_);
    } else {
      bool found_one = false;
      for (size_t i = 0; i < static_cast<size_t>(CalleeSaveType::kLastCalleeSaveType); ++i) {
        auto idx = static_cast<CalleeSaveType>(i);
        if (runtime->HasCalleeSaveMethod(idx) && runtime->GetCalleeSaveMethod(idx) == orig) {
          found_one = true;
          break;
        }
      }
      CHECK(found_one) << "Expected to find callee save method but got " << orig->PrettyMethod();
      CHECK(copy->IsRuntimeMethod());
    }
  } else {
    // We assume all methods have code. If they don't currently, then we set them to use the
    // resolution trampoline. Abstract methods never have code and so we need to make sure
    // their use results in an AbstractMethodError. We use the interpreter to achieve this.
    if (UNLIKELY(!orig->IsInvokable())) {
      copy->SetEntryPointFromQuickCompiledCodePtrSize(
          GetOatAddress(StubType::kQuickToInterpreterBridge), target_ptr_size_);
    } else {
      bool quick_is_interpreted;
      const uint8_t* quick_code = GetQuickCode(orig, image_info, &quick_is_interpreted);
      copy->SetEntryPointFromQuickCompiledCodePtrSize(quick_code, target_ptr_size_);

      // JNI entrypoint:
      if (orig->IsNative()) {
        // The native method's pointer is set to a stub to look up via dlsym.
        // Note this is not the code_ pointer; that is handled above.
        copy->SetEntryPointFromJniPtrSize(
            GetOatAddress(StubType::kJNIDlsymLookup), target_ptr_size_);
      }
    }
  }
}

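// Return the sum of the sizes of all bins before |up_to|.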
size_t ImageWriter::ImageInfo::GetBinSizeSum(Bin up_to) const {
  DCHECK_LE(static_cast<size_t>(up_to), kNumberOfBins);
  return std::accumulate(&bin_slot_sizes_[0],
                         &bin_slot_sizes_[0] + static_cast<size_t>(up_to),
                         /*init*/ static_cast<size_t>(0));
}

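// A BinSlot packs a bin and a byte index into a 32-bit word sized to match the object's lock
// word (hence the static_asserts below): the bin occupies kBinBits bits starting at kBinShift,
// and the index occupies the remaining low bits.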
ImageWriter::BinSlot::BinSlot(uint32_t lockword) : lockword_(lockword) {
  // These values may need to get updated if more bins are added to the enum Bin.
  static_assert(kBinBits == 3, "wrong number of bin bits");
  static_assert(kBinShift == 27, "wrong bin shift");
  static_assert(sizeof(BinSlot) == sizeof(LockWord), "BinSlot/LockWord must have equal sizes");

  DCHECK_LT(GetBin(), Bin::kMirrorCount);
  DCHECK_ALIGNED(GetIndex(), kObjectAlignment);
}

ImageWriter::BinSlot::BinSlot(Bin bin, uint32_t index)
    : BinSlot(index | (static_cast<uint32_t>(bin) << kBinShift)) {
  DCHECK_EQ(index, GetIndex());
}

ImageWriter::Bin ImageWriter::BinSlot::GetBin() const {
  return static_cast<Bin>((lockword_ & kBinMask) >> kBinShift);
}

uint32_t ImageWriter::BinSlot::GetIndex() const {
  return lockword_ & ~kBinMask;
}

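// Map a native object relocation type to the bin in which such objects are laid out.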
ImageWriter::Bin ImageWriter::BinTypeForNativeRelocationType(NativeObjectRelocationType type) {
  switch (type) {
    case NativeObjectRelocationType::kArtField:
    case NativeObjectRelocationType::kArtFieldArray:
      return Bin::kArtField;
    case NativeObjectRelocationType::kArtMethodClean:
    case NativeObjectRelocationType::kArtMethodArrayClean:
      return Bin::kArtMethodClean;
    case NativeObjectRelocationType::kArtMethodDirty:
    case NativeObjectRelocationType::kArtMethodArrayDirty:
      return Bin::kArtMethodDirty;
    case NativeObjectRelocationType::kDexCacheArray:
      return Bin::kDexCacheArray;
    case NativeObjectRelocationType::kRuntimeMethod:
      return Bin::kRuntimeMethod;
    case NativeObjectRelocationType::kIMTable:
      return Bin::kImTable;
    case NativeObjectRelocationType::kIMTConflictTable:
      return Bin::kIMTConflictTable;
  }
  UNREACHABLE();
}

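// For multi-image compiles, look up which oat file an object, dex file or dex cache belongs to;
// single-image compiles always use the default index.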
size_t ImageWriter::GetOatIndex(mirror::Object* obj) const {
  if (!IsMultiImage()) {
    return GetDefaultOatIndex();
  }
  auto it = oat_index_map_.find(obj);
  DCHECK(it != oat_index_map_.end()) << obj;
  return it->second;
}

size_t ImageWriter::GetOatIndexForDexFile(const DexFile* dex_file) const {
  if (!IsMultiImage()) {
    return GetDefaultOatIndex();
  }
  auto it = dex_file_oat_index_map_.find(dex_file);
  DCHECK(it != dex_file_oat_index_map_.end()) << dex_file->GetLocation();
  return it->second;
}

size_t ImageWriter::GetOatIndexForDexCache(ObjPtr<mirror::DexCache> dex_cache) const {
  return (dex_cache == nullptr)
      ? GetDefaultOatIndex()
      : GetOatIndexForDexFile(dex_cache->GetDexFile());
}

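// Record the layout of the oat file with the given index. The oat files are laid out directly
// after the end of the last image; for boot images with multiple oat files, this also advances
// the oat_offset_ of the next image.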
void ImageWriter::UpdateOatFileLayout(size_t oat_index,
                                      size_t oat_loaded_size,
                                      size_t oat_data_offset,
                                      size_t oat_data_size) {
  const uint8_t* images_end = image_infos_.back().image_begin_ + image_infos_.back().image_size_;
  for (const ImageInfo& info : image_infos_) {
    DCHECK_LE(info.image_begin_ + info.image_size_, images_end);
  }
  DCHECK(images_end != nullptr);  // Image space must be ready.

  ImageInfo& cur_image_info = GetImageInfo(oat_index);
  cur_image_info.oat_file_begin_ = images_end + cur_image_info.oat_offset_;
  cur_image_info.oat_loaded_size_ = oat_loaded_size;
  cur_image_info.oat_data_begin_ = cur_image_info.oat_file_begin_ + oat_data_offset;
  cur_image_info.oat_size_ = oat_data_size;

  if (compile_app_image_) {
    CHECK_EQ(oat_filenames_.size(), 1u) << "App image should have no next image.";
    return;
  }

  // Update the oat_offset of the next image info.
  if (oat_index + 1u != oat_filenames_.size()) {
    // There is a following one.
    ImageInfo& next_image_info = GetImageInfo(oat_index + 1u);
    next_image_info.oat_offset_ = cur_image_info.oat_offset_ + oat_loaded_size;
  }
}

void ImageWriter::UpdateOatFileHeader(size_t oat_index, const OatHeader& oat_header) {
  ImageInfo& cur_image_info = GetImageInfo(oat_index);
  cur_image_info.oat_checksum_ = oat_header.GetChecksum();

  if (oat_index == GetDefaultOatIndex()) {
    // Primary oat file, read the trampolines.
    cur_image_info.SetStubOffset(StubType::kInterpreterToInterpreterBridge,
                                 oat_header.GetInterpreterToInterpreterBridgeOffset());
    cur_image_info.SetStubOffset(StubType::kInterpreterToCompiledCodeBridge,
                                 oat_header.GetInterpreterToCompiledCodeBridgeOffset());
    cur_image_info.SetStubOffset(StubType::kJNIDlsymLookup,
                                 oat_header.GetJniDlsymLookupOffset());
    cur_image_info.SetStubOffset(StubType::kQuickGenericJNITrampoline,
                                 oat_header.GetQuickGenericJniTrampolineOffset());
    cur_image_info.SetStubOffset(StubType::kQuickIMTConflictTrampoline,
                                 oat_header.GetQuickImtConflictTrampolineOffset());
    cur_image_info.SetStubOffset(StubType::kQuickResolutionTrampoline,
                                 oat_header.GetQuickResolutionTrampolineOffset());
    cur_image_info.SetStubOffset(StubType::kQuickToInterpreterBridge,
                                 oat_header.GetQuickToInterpreterBridgeOffset());
  }
}

ImageWriter::ImageWriter(
    const CompilerDriver& compiler_driver,
    uintptr_t image_begin,
    bool compile_pic,
    bool compile_app_image,
    ImageHeader::StorageMode image_storage_mode,
    const std::vector<const char*>& oat_filenames,
    const std::unordered_map<const DexFile*, size_t>& dex_file_oat_index_map,
    const std::unordered_set<std::string>* dirty_image_objects)
    : compiler_driver_(compiler_driver),
      global_image_begin_(reinterpret_cast<uint8_t*>(image_begin)),
      image_objects_offset_begin_(0),
      compile_pic_(compile_pic),
      compile_app_image_(compile_app_image),
      target_ptr_size_(InstructionSetPointerSize(compiler_driver_.GetInstructionSet())),
      image_infos_(oat_filenames.size()),
      dirty_methods_(0u),
      clean_methods_(0u),
      image_storage_mode_(image_storage_mode),
      oat_filenames_(oat_filenames),
      dex_file_oat_index_map_(dex_file_oat_index_map),
      dirty_image_objects_(dirty_image_objects) {
  CHECK_NE(image_begin, 0U);
  std::fill_n(image_methods_, arraysize(image_methods_), nullptr);
  CHECK_EQ(compile_app_image, !Runtime::Current()->GetHeap()->GetBootImageSpaces().empty())
      << "Compiling a boot image should occur iff there are no boot image spaces loaded";
}

ImageWriter::ImageInfo::ImageInfo()
    : intern_table_(new InternTable),
      class_table_(new ClassTable) {}

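// Write the image address of |src| into the given heap or compressed reference slot of a copy.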
void ImageWriter::CopyReference(mirror::HeapReference<mirror::Object>* dest,
                                ObjPtr<mirror::Object> src) {
  dest->Assign(GetImageAddress(src.Ptr()));
}

void ImageWriter::CopyReference(mirror::CompressedReference<mirror::Object>* dest,
                                ObjPtr<mirror::Object> src) {
  dest->Assign(GetImageAddress(src.Ptr()));
}

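// Store the image location of the native object |value| into |target|, writing a 32-bit value
// on 32-bit targets. Null and boot image pointers are written unchanged.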
void ImageWriter::CopyAndFixupPointer(void** target, void* value) {
  void* new_value = value;
  if (value != nullptr && !IsInBootImage(value)) {
    auto it = native_object_relocations_.find(value);
    CHECK(it != native_object_relocations_.end()) << value;
    const NativeObjectRelocation& relocation = it->second;
    ImageInfo& image_info = GetImageInfo(relocation.oat_index);
    new_value = reinterpret_cast<void*>(image_info.image_begin_ + relocation.offset);
  }
  if (target_ptr_size_ == PointerSize::k32) {
    *reinterpret_cast<uint32_t*>(target) = PointerToLowMemUInt32(new_value);
  } else {
    *reinterpret_cast<uint64_t*>(target) = reinterpret_cast<uintptr_t>(new_value);
  }
}

}  // namespace linker
}  // namespace art