1 /* 2 * Copyright (C) 2015 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 #include "profile_compilation_info.h" 18 19 #include <sys/file.h> 20 #include <sys/stat.h> 21 #include <sys/types.h> 22 #include <unistd.h> 23 #include <zlib.h> 24 25 #include <algorithm> 26 #include <cerrno> 27 #include <climits> 28 #include <cstdlib> 29 #include <iostream> 30 #include <numeric> 31 #include <random> 32 #include <string> 33 #include <vector> 34 35 #include "android-base/file.h" 36 37 #include "base/arena_allocator.h" 38 #include "base/dumpable.h" 39 #include "base/file_utils.h" 40 #include "base/logging.h" // For VLOG. 41 #include "base/malloc_arena_pool.h" 42 #include "base/os.h" 43 #include "base/safe_map.h" 44 #include "base/scoped_flock.h" 45 #include "base/stl_util.h" 46 #include "base/systrace.h" 47 #include "base/time_utils.h" 48 #include "base/unix_file/fd_file.h" 49 #include "base/utils.h" 50 #include "base/zip_archive.h" 51 #include "dex/dex_file_loader.h" 52 53 namespace art { 54 55 const uint8_t ProfileCompilationInfo::kProfileMagic[] = { 'p', 'r', 'o', '\0' }; 56 // Last profile version: merge profiles directly from the file without creating 57 // profile_compilation_info object. All the profile line headers are now placed together 58 // before corresponding method_encodings and class_ids. 59 const uint8_t ProfileCompilationInfo::kProfileVersion[] = { '0', '1', '0', '\0' }; 60 const uint8_t ProfileCompilationInfo::kProfileVersionWithCounters[] = { '5', '0', '0', '\0' }; 61 62 static_assert(sizeof(ProfileCompilationInfo::kProfileVersion) == 4, 63 "Invalid profile version size"); 64 static_assert(sizeof(ProfileCompilationInfo::kProfileVersionWithCounters) == 4, 65 "Invalid profile version size"); 66 67 // The name of the profile entry in the dex metadata file. 68 // DO NOT CHANGE THIS! (it's similar to classes.dex in the apk files). 69 const char ProfileCompilationInfo::kDexMetadataProfileEntry[] = "primary.prof"; 70 71 static constexpr uint16_t kMaxDexFileKeyLength = PATH_MAX; 72 73 // Debug flag to ignore checksums when testing if a method or a class is present in the profile. 74 // Used to facilitate testing profile guided compilation across a large number of apps 75 // using the same test profile. 
static constexpr bool kDebugIgnoreChecksum = false;

static constexpr uint8_t kIsMissingTypesEncoding = 6;
static constexpr uint8_t kIsMegamorphicEncoding = 7;

static_assert(sizeof(ProfileCompilationInfo::kIndividualInlineCacheSize) == sizeof(uint8_t),
              "InlineCache::kIndividualInlineCacheSize does not have the expected type size");
static_assert(ProfileCompilationInfo::kIndividualInlineCacheSize < kIsMegamorphicEncoding,
              "InlineCache::kIndividualInlineCacheSize is larger than expected");
static_assert(ProfileCompilationInfo::kIndividualInlineCacheSize < kIsMissingTypesEncoding,
              "InlineCache::kIndividualInlineCacheSize is larger than expected");

static bool ChecksumMatch(uint32_t dex_file_checksum, uint32_t checksum) {
  return kDebugIgnoreChecksum || dex_file_checksum == checksum;
}

// For storage efficiency we store aggregation counts of at most 2^16 - 1.
static uint16_t IncrementAggregationCounter(uint16_t counter, uint16_t value) {
  if (counter < (std::numeric_limits<uint16_t>::max() - value)) {
    return counter + value;
  } else {
    return std::numeric_limits<uint16_t>::max();
  }
}

ProfileCompilationInfo::ProfileCompilationInfo(ArenaPool* custom_arena_pool)
    : default_arena_pool_(),
      allocator_(custom_arena_pool),
      info_(allocator_.Adapter(kArenaAllocProfile)),
      profile_key_map_(std::less<const std::string>(), allocator_.Adapter(kArenaAllocProfile)),
      aggregation_count_(0) {
  InitProfileVersionInternal(kProfileVersion);
}

ProfileCompilationInfo::ProfileCompilationInfo()
    : default_arena_pool_(),
      allocator_(&default_arena_pool_),
      info_(allocator_.Adapter(kArenaAllocProfile)),
      profile_key_map_(std::less<const std::string>(), allocator_.Adapter(kArenaAllocProfile)),
      aggregation_count_(0) {
  InitProfileVersionInternal(kProfileVersion);
}

ProfileCompilationInfo::~ProfileCompilationInfo() {
  VLOG(profiler) << Dumpable<MemStats>(allocator_.GetMemStats());
  ClearData();
}

void ProfileCompilationInfo::DexPcData::AddClass(uint16_t dex_profile_idx,
                                                 const dex::TypeIndex& type_idx) {
  if (is_megamorphic || is_missing_types) {
    return;
  }

  // Perform an explicit lookup for the type instead of directly emplacing the
  // element. We do this because emplace() allocates the node before doing the
  // lookup and if it then finds an identical element, it shall deallocate the
  // node. For Arena allocations, that's essentially a leak.
  ClassReference ref(dex_profile_idx, type_idx);
  auto it = classes.find(ref);
  if (it != classes.end()) {
    // The type index exists.
    return;
  }

  // Check if adding the type will cause the cache to become megamorphic.
  if (classes.size() + 1 >= ProfileCompilationInfo::kIndividualInlineCacheSize) {
    is_megamorphic = true;
    classes.clear();
    return;
  }

  // The type does not exist and the inline cache will not be megamorphic.
  classes.insert(ref);
}

// Transform the actual dex location into relative paths.
// Note: this is OK because we don't store profiles of different apps into the same file.
// Apps with split apks don't cause trouble because each split has a different name and will not
// collide with other entries.
156 std::string ProfileCompilationInfo::GetProfileDexFileKey(const std::string& dex_location) { 157 DCHECK(!dex_location.empty()); 158 size_t last_sep_index = dex_location.find_last_of('/'); 159 if (last_sep_index == std::string::npos) { 160 return dex_location; 161 } else { 162 DCHECK(last_sep_index < dex_location.size()); 163 return dex_location.substr(last_sep_index + 1); 164 } 165 } 166 167 bool ProfileCompilationInfo::AddMethodIndex(MethodHotness::Flag flags, const MethodReference& ref) { 168 DexFileData* data = GetOrAddDexFileData(ref.dex_file); 169 if (data == nullptr) { 170 return false; 171 } 172 return data->AddMethod(flags, ref.index); 173 } 174 175 bool ProfileCompilationInfo::AddMethodIndex(MethodHotness::Flag flags, 176 const std::string& dex_location, 177 uint32_t checksum, 178 uint16_t method_idx, 179 uint32_t num_method_ids) { 180 DexFileData* data = GetOrAddDexFileData(GetProfileDexFileKey(dex_location), 181 checksum, 182 num_method_ids); 183 if (data == nullptr) { 184 return false; 185 } 186 return data->AddMethod(flags, method_idx); 187 } 188 189 bool ProfileCompilationInfo::AddMethods(const std::vector<ProfileMethodInfo>& methods, 190 MethodHotness::Flag flags) { 191 for (const ProfileMethodInfo& method : methods) { 192 if (!AddMethod(method, flags)) { 193 return false; 194 } 195 } 196 return true; 197 } 198 199 bool ProfileCompilationInfo::AddClasses(const std::set<DexCacheResolvedClasses>& resolved_classes) { 200 for (const DexCacheResolvedClasses& dex_cache : resolved_classes) { 201 if (!AddResolvedClasses(dex_cache)) { 202 return false; 203 } 204 } 205 return true; 206 } 207 208 bool ProfileCompilationInfo::MergeWith(const std::string& filename) { 209 std::string error; 210 #ifdef _WIN32 211 int flags = O_RDONLY; 212 #else 213 int flags = O_RDONLY | O_NOFOLLOW | O_CLOEXEC; 214 #endif 215 ScopedFlock profile_file = 216 LockedFile::Open(filename.c_str(), flags, /*block=*/false, &error); 217 218 if (profile_file.get() == nullptr) { 219 LOG(WARNING) << "Couldn't lock the profile file " << filename << ": " << error; 220 return false; 221 } 222 223 int fd = profile_file->Fd(); 224 225 ProfileLoadStatus status = LoadInternal(fd, &error); 226 if (status == kProfileLoadSuccess) { 227 return true; 228 } 229 230 LOG(WARNING) << "Could not load profile data from file " << filename << ": " << error; 231 return false; 232 } 233 234 bool ProfileCompilationInfo::Load(const std::string& filename, bool clear_if_invalid) { 235 ScopedTrace trace(__PRETTY_FUNCTION__); 236 std::string error; 237 238 if (!IsEmpty()) { 239 return kProfileLoadWouldOverwiteData; 240 } 241 242 #ifdef _WIN32 243 int flags = O_RDWR; 244 #else 245 int flags = O_RDWR | O_NOFOLLOW | O_CLOEXEC; 246 #endif 247 // There's no need to fsync profile data right away. We get many chances 248 // to write it again in case something goes wrong. We can rely on a simple 249 // close(), no sync, and let to the kernel decide when to write to disk. 
250 ScopedFlock profile_file = 251 LockedFile::Open(filename.c_str(), flags, /*block=*/false, &error); 252 253 if (profile_file.get() == nullptr) { 254 LOG(WARNING) << "Couldn't lock the profile file " << filename << ": " << error; 255 return false; 256 } 257 258 int fd = profile_file->Fd(); 259 260 ProfileLoadStatus status = LoadInternal(fd, &error); 261 if (status == kProfileLoadSuccess) { 262 return true; 263 } 264 265 if (clear_if_invalid && 266 ((status == kProfileLoadVersionMismatch) || (status == kProfileLoadBadData))) { 267 LOG(WARNING) << "Clearing bad or obsolete profile data from file " 268 << filename << ": " << error; 269 if (profile_file->ClearContent()) { 270 return true; 271 } else { 272 PLOG(WARNING) << "Could not clear profile file: " << filename; 273 return false; 274 } 275 } 276 277 LOG(WARNING) << "Could not load profile data from file " << filename << ": " << error; 278 return false; 279 } 280 281 bool ProfileCompilationInfo::Save(const std::string& filename, uint64_t* bytes_written) { 282 ScopedTrace trace(__PRETTY_FUNCTION__); 283 std::string error; 284 #ifdef _WIN32 285 int flags = O_WRONLY; 286 #else 287 int flags = O_WRONLY | O_NOFOLLOW | O_CLOEXEC; 288 #endif 289 // There's no need to fsync profile data right away. We get many chances 290 // to write it again in case something goes wrong. We can rely on a simple 291 // close(), no sync, and let to the kernel decide when to write to disk. 292 ScopedFlock profile_file = 293 LockedFile::Open(filename.c_str(), flags, /*block=*/false, &error); 294 if (profile_file.get() == nullptr) { 295 LOG(WARNING) << "Couldn't lock the profile file " << filename << ": " << error; 296 return false; 297 } 298 299 int fd = profile_file->Fd(); 300 301 // We need to clear the data because we don't support appending to the profiles yet. 302 if (!profile_file->ClearContent()) { 303 PLOG(WARNING) << "Could not clear profile file: " << filename; 304 return false; 305 } 306 307 // This doesn't need locking because we are trying to lock the file for exclusive 308 // access and fail immediately if we can't. 309 bool result = Save(fd); 310 if (result) { 311 int64_t size = OS::GetFileSizeBytes(filename.c_str()); 312 if (size != -1) { 313 VLOG(profiler) 314 << "Successfully saved profile info to " << filename << " Size: " 315 << size; 316 if (bytes_written != nullptr) { 317 *bytes_written = static_cast<uint64_t>(size); 318 } 319 } 320 } else { 321 VLOG(profiler) << "Failed to save profile info to " << filename; 322 } 323 return result; 324 } 325 326 // Returns true if all the bytes were successfully written to the file descriptor. 327 static bool WriteBuffer(int fd, const uint8_t* buffer, size_t byte_count) { 328 while (byte_count > 0) { 329 int bytes_written = TEMP_FAILURE_RETRY(write(fd, buffer, byte_count)); 330 if (bytes_written == -1) { 331 return false; 332 } 333 byte_count -= bytes_written; // Reduce the number of remaining bytes. 334 buffer += bytes_written; // Move the buffer forward. 335 } 336 return true; 337 } 338 339 // Add the string bytes to the buffer. 340 static void AddStringToBuffer(std::vector<uint8_t>* buffer, const std::string& value) { 341 buffer->insert(buffer->end(), value.begin(), value.end()); 342 } 343 344 // Insert each byte, from low to high into the buffer. 
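// For example, AddUintToBuffer(&buffer, static_cast<uint16_t>(0x0102)) appends the byte 0x02
// followed by 0x01, i.e. little-endian order regardless of the host byte order.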
template <typename T>
static void AddUintToBuffer(std::vector<uint8_t>* buffer, T value) {
  for (size_t i = 0; i < sizeof(T); i++) {
    buffer->push_back((value >> (i * kBitsPerByte)) & 0xff);
  }
}

static constexpr size_t kLineHeaderSize =
    2 * sizeof(uint16_t) +  // class_set.size + dex_location.size
    3 * sizeof(uint32_t);   // method_map.size + checksum + num_method_ids

/**
 * Serialization format:
 *    [profile_header, zipped[[profile_line_header1, profile_line_header2...],[profile_line_data1,
 *       profile_line_data2...]],global_aggregation_counter]
 * profile_header:
 *    magic,version,number_of_dex_files,uncompressed_size_of_zipped_data,compressed_data_size
 * profile_line_header:
 *    dex_location,number_of_classes,methods_region_size,dex_location_checksum,num_method_ids
 * profile_line_data:
 *    method_encoding_1,method_encoding_2...,class_id1,class_id2...,startup/post startup bitmap,
 *    num_classes,class_counters,num_methods,method_counters
 * The aggregation counters are only stored if the profile version is kProfileVersionWithCounters.
 * The method_encoding is:
 *    method_id,number_of_inline_caches,inline_cache1,inline_cache2...
 * The inline_cache is:
 *    dex_pc,[M|dex_map_size], dex_profile_index,class_id1,class_id2...,dex_profile_index2,...
 *    dex_map_size is the number of dex_indices that follow.
 *       Classes are grouped per their dex files and the line
 *       `dex_profile_index,class_id1,class_id2...,dex_profile_index2,...` encodes the
 *       mapping from `dex_profile_index` to the set of classes `class_id1,class_id2...`
 *    M stands for megamorphic or missing types and it's encoded as either
 *       the byte kIsMegamorphicEncoding or kIsMissingTypesEncoding.
 *    When present, there will be no class ids following.
 **/
bool ProfileCompilationInfo::Save(int fd) {
  uint64_t start = NanoTime();
  ScopedTrace trace(__PRETTY_FUNCTION__);
  DCHECK_GE(fd, 0);

  // Use a vector wrapper to avoid keeping track of offsets when we add elements.
  std::vector<uint8_t> buffer;
  if (!WriteBuffer(fd, kProfileMagic, sizeof(kProfileMagic))) {
    return false;
  }
  if (!WriteBuffer(fd, version_, sizeof(version_))) {
    return false;
  }
  DCHECK_LE(info_.size(), std::numeric_limits<uint8_t>::max());
  AddUintToBuffer(&buffer, static_cast<uint8_t>(info_.size()));

  uint32_t required_capacity = 0;
  for (const DexFileData* dex_data_ptr : info_) {
    const DexFileData& dex_data = *dex_data_ptr;
    uint32_t methods_region_size = GetMethodsRegionSize(dex_data);
    required_capacity += kLineHeaderSize +
        dex_data.profile_key.size() +
        sizeof(uint16_t) * dex_data.class_set.size() +
        methods_region_size +
        dex_data.bitmap_storage.size();
    if (StoresAggregationCounters()) {
      required_capacity += sizeof(uint16_t) +  // num class counters
          sizeof(uint16_t) * dex_data.class_set.size() +
          sizeof(uint16_t) +  // num method counters
          sizeof(uint16_t) * dex_data_ptr->GetNumMethodCounters();
    }
  }
  if (StoresAggregationCounters()) {
    required_capacity += sizeof(uint16_t);  // global counter
  }

  // Allow large profiles for non-target builds for the case where we are merging many profiles
  // to generate a boot image profile.
  if (kIsTargetBuild && required_capacity > kProfileSizeErrorThresholdInBytes) {
    LOG(ERROR) << "Profile data size exceeds "
               << std::to_string(kProfileSizeErrorThresholdInBytes)
               << " bytes. Profile will not be written to disk.";
    return false;
  }
  AddUintToBuffer(&buffer, required_capacity);
  if (!WriteBuffer(fd, buffer.data(), buffer.size())) {
    return false;
  }
  // Make sure that the buffer has enough capacity to avoid repeated resizings
  // while we add data.
  buffer.reserve(required_capacity);
  buffer.clear();

  // Dex files must be written in the order of their profile index. This
  // avoids writing the index in the output file and simplifies the parsing logic.
  // Write profile line headers.
  for (const DexFileData* dex_data_ptr : info_) {
    const DexFileData& dex_data = *dex_data_ptr;

    if (dex_data.profile_key.size() >= kMaxDexFileKeyLength) {
      LOG(WARNING) << "DexFileKey exceeds allocated limit";
      return false;
    }

    uint32_t methods_region_size = GetMethodsRegionSize(dex_data);

    DCHECK_LE(dex_data.profile_key.size(), std::numeric_limits<uint16_t>::max());
    DCHECK_LE(dex_data.class_set.size(), std::numeric_limits<uint16_t>::max());
    // Write profile line header.
    AddUintToBuffer(&buffer, static_cast<uint16_t>(dex_data.profile_key.size()));
    AddUintToBuffer(&buffer, static_cast<uint16_t>(dex_data.class_set.size()));
    AddUintToBuffer(&buffer, methods_region_size);  // uint32_t
    AddUintToBuffer(&buffer, dex_data.checksum);  // uint32_t
    AddUintToBuffer(&buffer, dex_data.num_method_ids);  // uint32_t

    AddStringToBuffer(&buffer, dex_data.profile_key);
  }

  for (const DexFileData* dex_data_ptr : info_) {
    const DexFileData& dex_data = *dex_data_ptr;

    // Note that we allow dex files without any methods or classes, so that
    // inline caches can refer to valid dex files.

    uint16_t last_method_index = 0;
    for (const auto& method_it : dex_data.method_map) {
      // Store the difference between the method indices. The SafeMap is ordered by
      // method_id, so the difference will always be non-negative.
      DCHECK_GE(method_it.first, last_method_index);
      uint16_t diff_with_last_method_index = method_it.first - last_method_index;
      last_method_index = method_it.first;
      AddUintToBuffer(&buffer, diff_with_last_method_index);
      AddInlineCacheToBuffer(&buffer, method_it.second);
    }

    uint16_t last_class_index = 0;
    for (const auto& class_id : dex_data.class_set) {
      // Store the difference between the class indices. The set is ordered by
      // class_id, so the difference will always be non-negative.
      DCHECK_GE(class_id.index_, last_class_index);
      uint16_t diff_with_last_class_index = class_id.index_ - last_class_index;
      last_class_index = class_id.index_;
      AddUintToBuffer(&buffer, diff_with_last_class_index);
    }

    buffer.insert(buffer.end(),
                  dex_data.bitmap_storage.begin(),
                  dex_data.bitmap_storage.end());

    if (StoresAggregationCounters()) {
      AddUintToBuffer(&buffer, static_cast<uint16_t>(dex_data.class_set.size()));
      for (const auto& class_id : dex_data.class_set) {
        uint16_t type_idx = class_id.index_;
        AddUintToBuffer(&buffer, dex_data.class_counters[type_idx]);
      }
      AddUintToBuffer(&buffer, dex_data.GetNumMethodCounters());
      for (uint16_t method_idx = 0; method_idx < dex_data.num_method_ids; method_idx++) {
        if (dex_data.GetHotnessInfo(method_idx).IsInProfile()) {
          AddUintToBuffer(&buffer, dex_data.method_counters[method_idx]);
        }
      }
    }
  }

  if (StoresAggregationCounters()) {
    AddUintToBuffer(&buffer, aggregation_count_);
  }

  uint32_t output_size = 0;
  std::unique_ptr<uint8_t[]> compressed_buffer = DeflateBuffer(buffer.data(),
                                                               required_capacity,
                                                               &output_size);

  if (output_size > kProfileSizeWarningThresholdInBytes) {
    LOG(WARNING) << "Profile data size exceeds "
                 << std::to_string(kProfileSizeWarningThresholdInBytes);
  }

  buffer.clear();
  AddUintToBuffer(&buffer, output_size);

  if (!WriteBuffer(fd, buffer.data(), buffer.size())) {
    return false;
  }
  if (!WriteBuffer(fd, compressed_buffer.get(), output_size)) {
    return false;
  }
  uint64_t total_time = NanoTime() - start;
  VLOG(profiler) << "Compressed from "
                 << std::to_string(required_capacity)
                 << " to "
                 << std::to_string(output_size);
  VLOG(profiler) << "Time to save profile: " << std::to_string(total_time);
  return true;
}

void ProfileCompilationInfo::AddInlineCacheToBuffer(std::vector<uint8_t>* buffer,
                                                    const InlineCacheMap& inline_cache_map) {
  // Add inline cache map size.
  AddUintToBuffer(buffer, static_cast<uint16_t>(inline_cache_map.size()));
  if (inline_cache_map.size() == 0) {
    return;
  }
  for (const auto& inline_cache_it : inline_cache_map) {
    uint16_t dex_pc = inline_cache_it.first;
    const DexPcData dex_pc_data = inline_cache_it.second;
    const ClassSet& classes = dex_pc_data.classes;

    // Add the dex pc.
    AddUintToBuffer(buffer, dex_pc);

    // Add the megamorphic/missing_types encoding if needed and continue.
    // In either case we don't add any classes to the profile, so there's
    // no point in continuing.
    // TODO(calin): in case we miss types there is still value to add the
    // rest of the classes. They can be added without bumping the profile version.
    if (dex_pc_data.is_missing_types) {
      DCHECK(!dex_pc_data.is_megamorphic);  // At this point the megamorphic flag should not be set.
      DCHECK_EQ(classes.size(), 0u);
      AddUintToBuffer(buffer, kIsMissingTypesEncoding);
      continue;
    } else if (dex_pc_data.is_megamorphic) {
      DCHECK_EQ(classes.size(), 0u);
      AddUintToBuffer(buffer, kIsMegamorphicEncoding);
      continue;
    }

    DCHECK_LT(classes.size(), ProfileCompilationInfo::kIndividualInlineCacheSize);
    DCHECK_NE(classes.size(), 0u) << "InlineCache contains a dex_pc with 0 classes";

    SafeMap<uint8_t, std::vector<dex::TypeIndex>> dex_to_classes_map;
    // Group the classes by dex. We expect that most of the classes will come from
    // the same dex, so this will be more efficient than encoding the dex index
    // for each class reference.
    GroupClassesByDex(classes, &dex_to_classes_map);
    // Add the dex map size.
    AddUintToBuffer(buffer, static_cast<uint8_t>(dex_to_classes_map.size()));
    for (const auto& dex_it : dex_to_classes_map) {
      uint8_t dex_profile_index = dex_it.first;
      const std::vector<dex::TypeIndex>& dex_classes = dex_it.second;
      // Add the dex profile index.
      AddUintToBuffer(buffer, dex_profile_index);
      // Add the number of classes for each dex profile index.
      AddUintToBuffer(buffer, static_cast<uint8_t>(dex_classes.size()));
      for (size_t i = 0; i < dex_classes.size(); i++) {
        // Add the type index of the classes.
        AddUintToBuffer(buffer, dex_classes[i].index_);
      }
    }
  }
}

uint32_t ProfileCompilationInfo::GetMethodsRegionSize(const DexFileData& dex_data) {
  // ((uint16_t)method index + (uint16_t)inline cache size) * number of methods
  uint32_t size = 2 * sizeof(uint16_t) * dex_data.method_map.size();
  for (const auto& method_it : dex_data.method_map) {
    const InlineCacheMap& inline_cache = method_it.second;
    size += sizeof(uint16_t) * inline_cache.size();  // dex_pc
    for (const auto& inline_cache_it : inline_cache) {
      const ClassSet& classes = inline_cache_it.second.classes;
      SafeMap<uint8_t, std::vector<dex::TypeIndex>> dex_to_classes_map;
      GroupClassesByDex(classes, &dex_to_classes_map);
      size += sizeof(uint8_t);  // dex_to_classes_map size
      for (const auto& dex_it : dex_to_classes_map) {
        size += sizeof(uint8_t);  // dex profile index
        size += sizeof(uint8_t);  // number of classes
        const std::vector<dex::TypeIndex>& dex_classes = dex_it.second;
        size += sizeof(uint16_t) * dex_classes.size();  // the actual classes
      }
    }
  }
  return size;
}

void ProfileCompilationInfo::GroupClassesByDex(
    const ClassSet& classes,
    /*out*/SafeMap<uint8_t, std::vector<dex::TypeIndex>>* dex_to_classes_map) {
  for (const auto& classes_it : classes) {
    auto dex_it = dex_to_classes_map->FindOrAdd(classes_it.dex_profile_index);
    dex_it->second.push_back(classes_it.type_index);
  }
}

ProfileCompilationInfo::DexFileData* ProfileCompilationInfo::GetOrAddDexFileData(
    const std::string& profile_key,
    uint32_t checksum,
    uint32_t num_method_ids) {
  const auto profile_index_it = profile_key_map_.FindOrAdd(profile_key, profile_key_map_.size());
  if (profile_key_map_.size() > std::numeric_limits<uint8_t>::max()) {
    // Allow only 255 dex files to be profiled. This allows us to save bytes
    // when encoding. The number is well above what we expect for normal applications.
    if (kIsDebugBuild) {
      LOG(ERROR) << "Exceeded the maximum number of dex files (255). Something went wrong";
    }
    profile_key_map_.erase(profile_key);
    return nullptr;
  }

  uint8_t profile_index = profile_index_it->second;
  if (info_.size() <= profile_index) {
    // This is a new addition. Add it to the info_ array.
    DexFileData* dex_file_data = new (&allocator_) DexFileData(
        &allocator_,
        profile_key,
        checksum,
        profile_index,
        num_method_ids,
        StoresAggregationCounters());
    info_.push_back(dex_file_data);
  }
  DexFileData* result = info_[profile_index];

  // Check that the checksum matches.
  // This may differ if, for example, the dex file was updated and we had a record of the old one.
  if (result->checksum != checksum) {
    LOG(WARNING) << "Checksum mismatch for dex " << profile_key;
    return nullptr;
  }

  // DCHECK that the profile info map key is consistent with the one stored in the dex file data.
  // This should always be the case since the cache map is managed by ProfileCompilationInfo.
  DCHECK_EQ(profile_key, result->profile_key);
  DCHECK_EQ(profile_index, result->profile_index);

  if (num_method_ids != result->num_method_ids) {
    // This should not happen... added to help investigating b/65812889.
    LOG(ERROR) << "num_method_ids mismatch for dex " << profile_key
               << ", expected=" << num_method_ids
               << ", actual=" << result->num_method_ids;
    return nullptr;
  }

  return result;
}

const ProfileCompilationInfo::DexFileData* ProfileCompilationInfo::FindDexData(
    const std::string& profile_key,
    uint32_t checksum,
    bool verify_checksum) const {
  const auto profile_index_it = profile_key_map_.find(profile_key);
  if (profile_index_it == profile_key_map_.end()) {
    return nullptr;
  }

  uint8_t profile_index = profile_index_it->second;
  const DexFileData* result = info_[profile_index];
  if (verify_checksum && !ChecksumMatch(result->checksum, checksum)) {
    return nullptr;
  }
  DCHECK_EQ(profile_key, result->profile_key);
  DCHECK_EQ(profile_index, result->profile_index);
  return result;
}

bool ProfileCompilationInfo::AddResolvedClasses(const DexCacheResolvedClasses& classes) {
  const std::string dex_location = GetProfileDexFileKey(classes.GetDexLocation());
  const uint32_t checksum = classes.GetLocationChecksum();
  DexFileData* const data = GetOrAddDexFileData(dex_location, checksum, classes.NumMethodIds());
  if (data == nullptr) {
    return false;
  }
  data->class_set.insert(classes.GetClasses().begin(), classes.GetClasses().end());
  return true;
}

bool ProfileCompilationInfo::AddMethod(const std::string& dex_location,
                                       uint32_t dex_checksum,
                                       uint16_t method_index,
                                       uint32_t num_method_ids,
                                       const OfflineProfileMethodInfo& pmi,
                                       MethodHotness::Flag flags) {
  DexFileData* const data = GetOrAddDexFileData(GetProfileDexFileKey(dex_location),
                                                dex_checksum,
                                                num_method_ids);
  if (data == nullptr) {
    // The data is null if there is a mismatch in the checksum or number of method ids.
    return false;
  }

  // Add the method.
  InlineCacheMap* inline_cache = data->FindOrAddMethod(method_index);
  if (inline_cache == nullptr) {
    // Happens if the method index is outside the range (i.e. is greater than the number
    // of methods in the dex file). This should not happen during normal execution,
    // but tools (e.g. boot image aggregation tools) and tests stress this behaviour.
    return false;
  }

  data->SetMethodHotness(method_index, flags);

  if (pmi.inline_caches == nullptr) {
    // If we don't have inline caches, return success right away.
732 return true; 733 } 734 for (const auto& pmi_inline_cache_it : *pmi.inline_caches) { 735 uint16_t pmi_ic_dex_pc = pmi_inline_cache_it.first; 736 const DexPcData& pmi_ic_dex_pc_data = pmi_inline_cache_it.second; 737 DexPcData* dex_pc_data = FindOrAddDexPc(inline_cache, pmi_ic_dex_pc); 738 if (dex_pc_data->is_missing_types || dex_pc_data->is_megamorphic) { 739 // We are already megamorphic or we are missing types; no point in going forward. 740 continue; 741 } 742 743 if (pmi_ic_dex_pc_data.is_missing_types) { 744 dex_pc_data->SetIsMissingTypes(); 745 continue; 746 } 747 if (pmi_ic_dex_pc_data.is_megamorphic) { 748 dex_pc_data->SetIsMegamorphic(); 749 continue; 750 } 751 752 for (const ClassReference& class_ref : pmi_ic_dex_pc_data.classes) { 753 const DexReference& dex_ref = pmi.dex_references[class_ref.dex_profile_index]; 754 DexFileData* class_dex_data = GetOrAddDexFileData( 755 GetProfileDexFileKey(dex_ref.dex_location), 756 dex_ref.dex_checksum, 757 dex_ref.num_method_ids); 758 if (class_dex_data == nullptr) { // checksum mismatch 759 return false; 760 } 761 dex_pc_data->AddClass(class_dex_data->profile_index, class_ref.type_index); 762 } 763 } 764 return true; 765 } 766 767 bool ProfileCompilationInfo::AddMethod(const ProfileMethodInfo& pmi, MethodHotness::Flag flags) { 768 DexFileData* const data = GetOrAddDexFileData(pmi.ref.dex_file); 769 if (data == nullptr) { // checksum mismatch 770 return false; 771 } 772 InlineCacheMap* inline_cache = data->FindOrAddMethod(pmi.ref.index); 773 if (inline_cache == nullptr) { 774 return false; 775 } 776 data->SetMethodHotness(pmi.ref.index, flags); 777 778 for (const ProfileMethodInfo::ProfileInlineCache& cache : pmi.inline_caches) { 779 if (cache.is_missing_types) { 780 FindOrAddDexPc(inline_cache, cache.dex_pc)->SetIsMissingTypes(); 781 continue; 782 } 783 for (const TypeReference& class_ref : cache.classes) { 784 DexFileData* class_dex_data = GetOrAddDexFileData(class_ref.dex_file); 785 if (class_dex_data == nullptr) { // checksum mismatch 786 return false; 787 } 788 DexPcData* dex_pc_data = FindOrAddDexPc(inline_cache, cache.dex_pc); 789 if (dex_pc_data->is_missing_types) { 790 // Don't bother adding classes if we are missing types. 
791 break; 792 } 793 dex_pc_data->AddClass(class_dex_data->profile_index, class_ref.TypeIndex()); 794 } 795 } 796 return true; 797 } 798 799 bool ProfileCompilationInfo::AddClassIndex(const std::string& dex_location, 800 uint32_t checksum, 801 dex::TypeIndex type_idx, 802 uint32_t num_method_ids) { 803 DexFileData* const data = GetOrAddDexFileData(dex_location, checksum, num_method_ids); 804 if (data == nullptr) { 805 return false; 806 } 807 data->class_set.insert(type_idx); 808 return true; 809 } 810 811 #define READ_UINT(type, buffer, dest, error) \ 812 do { \ 813 if (!(buffer).ReadUintAndAdvance<type>(&(dest))) { \ 814 *(error) = "Could not read "#dest; \ 815 return false; \ 816 } \ 817 } \ 818 while (false) 819 820 bool ProfileCompilationInfo::ReadInlineCache( 821 SafeBuffer& buffer, 822 uint8_t number_of_dex_files, 823 const SafeMap<uint8_t, uint8_t>& dex_profile_index_remap, 824 /*out*/ InlineCacheMap* inline_cache, 825 /*out*/ std::string* error) { 826 uint16_t inline_cache_size; 827 READ_UINT(uint16_t, buffer, inline_cache_size, error); 828 for (; inline_cache_size > 0; inline_cache_size--) { 829 uint16_t dex_pc; 830 uint8_t dex_to_classes_map_size; 831 READ_UINT(uint16_t, buffer, dex_pc, error); 832 READ_UINT(uint8_t, buffer, dex_to_classes_map_size, error); 833 DexPcData* dex_pc_data = FindOrAddDexPc(inline_cache, dex_pc); 834 if (dex_to_classes_map_size == kIsMissingTypesEncoding) { 835 dex_pc_data->SetIsMissingTypes(); 836 continue; 837 } 838 if (dex_to_classes_map_size == kIsMegamorphicEncoding) { 839 dex_pc_data->SetIsMegamorphic(); 840 continue; 841 } 842 for (; dex_to_classes_map_size > 0; dex_to_classes_map_size--) { 843 uint8_t dex_profile_index; 844 uint8_t dex_classes_size; 845 READ_UINT(uint8_t, buffer, dex_profile_index, error); 846 READ_UINT(uint8_t, buffer, dex_classes_size, error); 847 if (dex_profile_index >= number_of_dex_files) { 848 *error = "dex_profile_index out of bounds "; 849 *error += std::to_string(dex_profile_index) + " " + std::to_string(number_of_dex_files); 850 return false; 851 } 852 for (; dex_classes_size > 0; dex_classes_size--) { 853 uint16_t type_index; 854 READ_UINT(uint16_t, buffer, type_index, error); 855 auto it = dex_profile_index_remap.find(dex_profile_index); 856 if (it == dex_profile_index_remap.end()) { 857 // If we don't have an index that's because the dex file was filtered out when loading. 858 // Set missing types on the dex pc data. 
          dex_pc_data->SetIsMissingTypes();
        } else {
          dex_pc_data->AddClass(it->second, dex::TypeIndex(type_index));
        }
      }
    }
  }
  return true;
}

bool ProfileCompilationInfo::ReadMethods(SafeBuffer& buffer,
                                         uint8_t number_of_dex_files,
                                         const ProfileLineHeader& line_header,
                                         const SafeMap<uint8_t, uint8_t>& dex_profile_index_remap,
                                         /*out*/std::string* error) {
  uint32_t unread_bytes_before_operation = buffer.CountUnreadBytes();
  if (unread_bytes_before_operation < line_header.method_region_size_bytes) {
    *error += "Profile EOF reached prematurely for ReadMethods";
    return false;
  }
  size_t expected_unread_bytes_after_operation = buffer.CountUnreadBytes()
      - line_header.method_region_size_bytes;
  uint16_t last_method_index = 0;
  while (buffer.CountUnreadBytes() > expected_unread_bytes_after_operation) {
    DexFileData* const data = GetOrAddDexFileData(line_header.dex_location,
                                                  line_header.checksum,
                                                  line_header.num_method_ids);
    uint16_t diff_with_last_method_index;
    READ_UINT(uint16_t, buffer, diff_with_last_method_index, error);
    uint16_t method_index = last_method_index + diff_with_last_method_index;
    last_method_index = method_index;
    InlineCacheMap* inline_cache = data->FindOrAddMethod(method_index);
    if (inline_cache == nullptr) {
      return false;
    }
    if (!ReadInlineCache(buffer,
                         number_of_dex_files,
                         dex_profile_index_remap,
                         inline_cache,
                         error)) {
      return false;
    }
  }
  uint32_t total_bytes_read = unread_bytes_before_operation - buffer.CountUnreadBytes();
  if (total_bytes_read != line_header.method_region_size_bytes) {
    *error += "Profile data inconsistent for ReadMethods";
    return false;
  }
  return true;
}

bool ProfileCompilationInfo::ReadClasses(SafeBuffer& buffer,
                                         const ProfileLineHeader& line_header,
                                         /*out*/std::string* error) {
  size_t unread_bytes_before_op = buffer.CountUnreadBytes();
  if (unread_bytes_before_op < line_header.class_set_size) {
    *error += "Profile EOF reached prematurely for ReadClasses";
    return false;
  }

  uint16_t last_class_index = 0;
  for (uint16_t i = 0; i < line_header.class_set_size; i++) {
    uint16_t diff_with_last_class_index;
    READ_UINT(uint16_t, buffer, diff_with_last_class_index, error);
    uint16_t type_index = last_class_index + diff_with_last_class_index;
    last_class_index = type_index;
    if (!AddClassIndex(line_header.dex_location,
                       line_header.checksum,
                       dex::TypeIndex(type_index),
                       line_header.num_method_ids)) {
      return false;
    }
  }
  size_t total_bytes_read = unread_bytes_before_op - buffer.CountUnreadBytes();
  uint32_t expected_bytes_read = line_header.class_set_size * sizeof(uint16_t);
  if (total_bytes_read != expected_bytes_read) {
    *error += "Profile data inconsistent for ReadClasses";
    return false;
  }
  return true;
}

// Tests for EOF by trying to read 1 byte from the descriptor.
// Returns:
//   0 if the descriptor is at the EOF,
//  -1 if there was an IO error,
//   1 if the descriptor has more content to read.
static int testEOF(int fd) {
  uint8_t buffer[1];
  return TEMP_FAILURE_RETRY(read(fd, buffer, 1));
}

// Reads a uint value previously written with AddUintToBuffer.
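// Returns false (without advancing the current pointer) if fewer than sizeof(T) unread
// bytes remain in the buffer.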
952 template <typename T> 953 bool ProfileCompilationInfo::SafeBuffer::ReadUintAndAdvance(/*out*/T* value) { 954 static_assert(std::is_unsigned<T>::value, "Type is not unsigned"); 955 if (ptr_current_ + sizeof(T) > ptr_end_) { 956 return false; 957 } 958 *value = 0; 959 for (size_t i = 0; i < sizeof(T); i++) { 960 *value += ptr_current_[i] << (i * kBitsPerByte); 961 } 962 ptr_current_ += sizeof(T); 963 return true; 964 } 965 966 bool ProfileCompilationInfo::SafeBuffer::CompareAndAdvance(const uint8_t* data, size_t data_size) { 967 if (ptr_current_ + data_size > ptr_end_) { 968 return false; 969 } 970 if (memcmp(ptr_current_, data, data_size) == 0) { 971 ptr_current_ += data_size; 972 return true; 973 } 974 return false; 975 } 976 977 ProfileCompilationInfo::ProfileLoadStatus ProfileCompilationInfo::SafeBuffer::Fill( 978 ProfileSource& source, 979 const std::string& debug_stage, 980 /*out*/ std::string* error) { 981 size_t byte_count = (ptr_end_ - ptr_current_) * sizeof(*ptr_current_); 982 uint8_t* buffer = ptr_current_; 983 return source.Read(buffer, byte_count, debug_stage, error); 984 } 985 986 size_t ProfileCompilationInfo::SafeBuffer::CountUnreadBytes() { 987 return (ptr_end_ - ptr_current_) * sizeof(*ptr_current_); 988 } 989 990 const uint8_t* ProfileCompilationInfo::SafeBuffer::GetCurrentPtr() { 991 return ptr_current_; 992 } 993 994 void ProfileCompilationInfo::SafeBuffer::Advance(size_t data_size) { 995 ptr_current_ += data_size; 996 } 997 998 ProfileCompilationInfo::ProfileLoadStatus ProfileCompilationInfo::ReadProfileHeader( 999 ProfileSource& source, 1000 /*out*/uint8_t* number_of_dex_files, 1001 /*out*/uint32_t* uncompressed_data_size, 1002 /*out*/uint32_t* compressed_data_size, 1003 /*out*/std::string* error) { 1004 // Read magic and version 1005 const size_t kMagicVersionSize = 1006 sizeof(kProfileMagic) + 1007 kProfileVersionSize + 1008 sizeof(uint8_t) + // number of dex files 1009 sizeof(uint32_t) + // size of uncompressed profile data 1010 sizeof(uint32_t); // size of compressed profile data 1011 1012 SafeBuffer safe_buffer(kMagicVersionSize); 1013 1014 ProfileLoadStatus status = safe_buffer.Fill(source, "ReadProfileHeader", error); 1015 if (status != kProfileLoadSuccess) { 1016 return status; 1017 } 1018 1019 if (!safe_buffer.CompareAndAdvance(kProfileMagic, sizeof(kProfileMagic))) { 1020 *error = "Profile missing magic"; 1021 return kProfileLoadVersionMismatch; 1022 } 1023 if (safe_buffer.CountUnreadBytes() < kProfileVersionSize) { 1024 *error = "Cannot read profile version"; 1025 return kProfileLoadBadData; 1026 } 1027 memcpy(version_, safe_buffer.GetCurrentPtr(), kProfileVersionSize); 1028 safe_buffer.Advance(kProfileVersionSize); 1029 if ((memcmp(version_, kProfileVersion, kProfileVersionSize) != 0) && 1030 (memcmp(version_, kProfileVersionWithCounters, kProfileVersionSize) != 0)) { 1031 *error = "Profile version mismatch"; 1032 return kProfileLoadVersionMismatch; 1033 } 1034 1035 if (!safe_buffer.ReadUintAndAdvance<uint8_t>(number_of_dex_files)) { 1036 *error = "Cannot read the number of dex files"; 1037 return kProfileLoadBadData; 1038 } 1039 if (!safe_buffer.ReadUintAndAdvance<uint32_t>(uncompressed_data_size)) { 1040 *error = "Cannot read the size of uncompressed data"; 1041 return kProfileLoadBadData; 1042 } 1043 if (!safe_buffer.ReadUintAndAdvance<uint32_t>(compressed_data_size)) { 1044 *error = "Cannot read the size of compressed data"; 1045 return kProfileLoadBadData; 1046 } 1047 return kProfileLoadSuccess; 1048 } 1049 1050 bool 
ProfileCompilationInfo::ReadProfileLineHeaderElements(SafeBuffer& buffer, 1051 /*out*/uint16_t* dex_location_size, 1052 /*out*/ProfileLineHeader* line_header, 1053 /*out*/std::string* error) { 1054 READ_UINT(uint16_t, buffer, *dex_location_size, error); 1055 READ_UINT(uint16_t, buffer, line_header->class_set_size, error); 1056 READ_UINT(uint32_t, buffer, line_header->method_region_size_bytes, error); 1057 READ_UINT(uint32_t, buffer, line_header->checksum, error); 1058 READ_UINT(uint32_t, buffer, line_header->num_method_ids, error); 1059 return true; 1060 } 1061 1062 ProfileCompilationInfo::ProfileLoadStatus ProfileCompilationInfo::ReadProfileLineHeader( 1063 SafeBuffer& buffer, 1064 /*out*/ProfileLineHeader* line_header, 1065 /*out*/std::string* error) { 1066 if (buffer.CountUnreadBytes() < kLineHeaderSize) { 1067 *error += "Profile EOF reached prematurely for ReadProfileLineHeader"; 1068 return kProfileLoadBadData; 1069 } 1070 1071 uint16_t dex_location_size; 1072 if (!ReadProfileLineHeaderElements(buffer, &dex_location_size, line_header, error)) { 1073 return kProfileLoadBadData; 1074 } 1075 1076 if (dex_location_size == 0 || dex_location_size > kMaxDexFileKeyLength) { 1077 *error = "DexFileKey has an invalid size: " + 1078 std::to_string(static_cast<uint32_t>(dex_location_size)); 1079 return kProfileLoadBadData; 1080 } 1081 1082 if (buffer.CountUnreadBytes() < dex_location_size) { 1083 *error += "Profile EOF reached prematurely for ReadProfileHeaderDexLocation"; 1084 return kProfileLoadBadData; 1085 } 1086 const uint8_t* base_ptr = buffer.GetCurrentPtr(); 1087 line_header->dex_location.assign( 1088 reinterpret_cast<const char*>(base_ptr), dex_location_size); 1089 buffer.Advance(dex_location_size); 1090 return kProfileLoadSuccess; 1091 } 1092 1093 ProfileCompilationInfo::ProfileLoadStatus ProfileCompilationInfo::ReadProfileLine( 1094 SafeBuffer& buffer, 1095 uint8_t number_of_dex_files, 1096 const ProfileLineHeader& line_header, 1097 const SafeMap<uint8_t, uint8_t>& dex_profile_index_remap, 1098 bool merge_classes, 1099 /*out*/std::string* error) { 1100 DexFileData* data = GetOrAddDexFileData(line_header.dex_location, 1101 line_header.checksum, 1102 line_header.num_method_ids); 1103 if (data == nullptr) { 1104 *error = "Error when reading profile file line header: checksum mismatch for " 1105 + line_header.dex_location; 1106 return kProfileLoadBadData; 1107 } 1108 1109 if (!ReadMethods(buffer, number_of_dex_files, line_header, dex_profile_index_remap, error)) { 1110 return kProfileLoadBadData; 1111 } 1112 1113 if (merge_classes) { 1114 if (!ReadClasses(buffer, line_header, error)) { 1115 return kProfileLoadBadData; 1116 } 1117 } 1118 1119 // Read method bitmap. 
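  // The bitmap was stored verbatim right after the class ids and encodes the
  // startup/post-startup flags for the methods of this dex file.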
  const size_t bytes = data->bitmap_storage.size();
  if (buffer.CountUnreadBytes() < bytes) {
    *error += "Profile EOF reached prematurely for ReadProfileLine";
    return kProfileLoadBadData;
  }
  const uint8_t* base_ptr = buffer.GetCurrentPtr();
  std::copy_n(base_ptr, bytes, data->bitmap_storage.data());
  buffer.Advance(bytes);

  if (StoresAggregationCounters()) {
    ReadAggregationCounters(buffer, *data, error);
  }

  return kProfileLoadSuccess;
}

bool ProfileCompilationInfo::ReadAggregationCounters(
    SafeBuffer& buffer,
    DexFileData& dex_data,
    /*out*/std::string* error) {
  size_t unread_bytes_before_op = buffer.CountUnreadBytes();
  size_t expected_byte_count = sizeof(uint16_t) *
      (dex_data.class_set.size() + dex_data.method_map.size() + 2);
  if (unread_bytes_before_op < expected_byte_count) {
    *error += "Profile EOF reached prematurely for ReadAggregationCounters";
    return false;
  }

  uint16_t num_class_counters;
  READ_UINT(uint16_t, buffer, num_class_counters, error);
  if (num_class_counters != dex_data.class_set.size()) {
    *error = "Invalid class count when reading counters";
    return false;
  }
  for (const auto& class_it : dex_data.class_set) {
    READ_UINT(uint16_t, buffer, dex_data.class_counters[class_it.index_], error);
  }

  uint16_t num_method_counters;
  READ_UINT(uint16_t, buffer, num_method_counters, error);
  if (num_method_counters != dex_data.GetNumMethodCounters()) {
    *error = "Invalid method count when reading counters";
    return false;
  }
  for (uint16_t method_idx = 0; method_idx < dex_data.num_method_ids; method_idx++) {
    if (dex_data.GetHotnessInfo(method_idx).IsInProfile()) {
      READ_UINT(uint16_t, buffer, dex_data.method_counters[method_idx], error);
    }
  }

  return true;
}

// TODO(calin): Fix this API. ProfileCompilationInfo::Load should be static and
// return a unique pointer to a ProfileCompilationInfo upon success.
bool ProfileCompilationInfo::Load(
    int fd, bool merge_classes, const ProfileLoadFilterFn& filter_fn) {
  std::string error;

  ProfileLoadStatus status = LoadInternal(fd, &error, merge_classes, filter_fn);

  if (status == kProfileLoadSuccess) {
    return true;
  } else {
    LOG(WARNING) << "Error when reading profile: " << error;
    return false;
  }
}

bool ProfileCompilationInfo::VerifyProfileData(const std::vector<const DexFile*>& dex_files) {
  std::unordered_map<std::string, const DexFile*> key_to_dex_file;
  for (const DexFile* dex_file : dex_files) {
    key_to_dex_file.emplace(GetProfileDexFileKey(dex_file->GetLocation()), dex_file);
  }
  for (const DexFileData* dex_data : info_) {
    const auto it = key_to_dex_file.find(dex_data->profile_key);
    if (it == key_to_dex_file.end()) {
      // It is okay if the profile contains data for additional dex files.
1198 continue; 1199 } 1200 const DexFile* dex_file = it->second; 1201 const std::string& dex_location = dex_file->GetLocation(); 1202 if (!ChecksumMatch(dex_data->checksum, dex_file->GetLocationChecksum())) { 1203 LOG(ERROR) << "Dex checksum mismatch while verifying profile " 1204 << "dex location " << dex_location << " (checksum=" 1205 << dex_file->GetLocationChecksum() << ", profile checksum=" 1206 << dex_data->checksum; 1207 return false; 1208 } 1209 1210 if (dex_data->num_method_ids != dex_file->NumMethodIds()) { 1211 LOG(ERROR) << "Number of method ids in dex file and profile don't match." 1212 << "dex location " << dex_location << " NumMethodId in DexFile" 1213 << dex_file->NumMethodIds() << ", NumMethodId in profile" 1214 << dex_data->num_method_ids; 1215 return false; 1216 } 1217 1218 // Verify method_encoding. 1219 for (const auto& method_it : dex_data->method_map) { 1220 size_t method_id = (size_t)(method_it.first); 1221 if (method_id >= dex_file->NumMethodIds()) { 1222 LOG(ERROR) << "Invalid method id in profile file. dex location=" 1223 << dex_location << " method_id=" << method_id << " NumMethodIds=" 1224 << dex_file->NumMethodIds(); 1225 return false; 1226 } 1227 1228 // Verify class indices of inline caches. 1229 const InlineCacheMap &inline_cache_map = method_it.second; 1230 for (const auto& inline_cache_it : inline_cache_map) { 1231 const DexPcData dex_pc_data = inline_cache_it.second; 1232 if (dex_pc_data.is_missing_types || dex_pc_data.is_megamorphic) { 1233 // No class indices to verify. 1234 continue; 1235 } 1236 1237 const ClassSet &classes = dex_pc_data.classes; 1238 SafeMap<uint8_t, std::vector<dex::TypeIndex>> dex_to_classes_map; 1239 // Group the classes by dex. We expect that most of the classes will come from 1240 // the same dex, so this will be more efficient than encoding the dex index 1241 // for each class reference. 1242 GroupClassesByDex(classes, &dex_to_classes_map); 1243 for (const auto &dex_it : dex_to_classes_map) { 1244 uint8_t dex_profile_index = dex_it.first; 1245 const auto dex_file_inline_cache_it = key_to_dex_file.find( 1246 info_[dex_profile_index]->profile_key); 1247 if (dex_file_inline_cache_it == key_to_dex_file.end()) { 1248 // It is okay if profile contains data for additional dex files. 1249 continue; 1250 } 1251 const DexFile *dex_file_for_inline_cache_check = dex_file_inline_cache_it->second; 1252 const std::vector<dex::TypeIndex> &dex_classes = dex_it.second; 1253 for (size_t i = 0; i < dex_classes.size(); i++) { 1254 if (dex_classes[i].index_ >= dex_file_for_inline_cache_check->NumTypeIds()) { 1255 LOG(ERROR) << "Invalid inline cache in profile file. dex location=" 1256 << dex_location << " method_id=" << method_id 1257 << " dex_profile_index=" 1258 << static_cast<uint16_t >(dex_profile_index) << " type_index=" 1259 << dex_classes[i].index_ 1260 << " NumTypeIds=" 1261 << dex_file_for_inline_cache_check->NumTypeIds(); 1262 return false; 1263 } 1264 } 1265 } 1266 } 1267 } 1268 // Verify class_ids. 1269 for (const auto& class_id : dex_data->class_set) { 1270 if (class_id.index_ >= dex_file->NumTypeIds()) { 1271 LOG(ERROR) << "Invalid class id in profile file. 
dex_file location "
            << dex_location << " class_id=" << class_id.index_ << " NumTypeIds="
            << dex_file->NumTypeIds();
        return false;
      }
    }
  }
  return true;
}

ProfileCompilationInfo::ProfileLoadStatus ProfileCompilationInfo::OpenSource(
    int32_t fd,
    /*out*/ std::unique_ptr<ProfileSource>* source,
    /*out*/ std::string* error) {
  if (IsProfileFile(fd)) {
    source->reset(ProfileSource::Create(fd));
    return kProfileLoadSuccess;
  } else {
    std::unique_ptr<ZipArchive> zip_archive(
        ZipArchive::OpenFromFd(DupCloexec(fd), "profile", error));
    if (zip_archive.get() == nullptr) {
      *error = "Could not open the profile zip archive";
      return kProfileLoadBadData;
    }
    std::unique_ptr<ZipEntry> zip_entry(zip_archive->Find(kDexMetadataProfileEntry, error));
    if (zip_entry == nullptr) {
      // Allow archives without the profile entry. In this case, create an empty profile.
      // This gives more flexibility when re-using archives that may miss the entry
      // (e.g. dex metadata files).
      LOG(WARNING) << "Could not find entry " << kDexMetadataProfileEntry
                   << " in the zip archive. Creating an empty profile.";
      source->reset(ProfileSource::Create(MemMap::Invalid()));
      return kProfileLoadSuccess;
    }
    if (zip_entry->GetUncompressedLength() == 0) {
      *error = "Empty profile entry in the zip archive.";
      return kProfileLoadBadData;
    }

    // TODO(calin) pass along file names to assist with debugging.
    MemMap map = zip_entry->MapDirectlyOrExtract(
        kDexMetadataProfileEntry, "profile file", error, alignof(ProfileSource));

    if (map.IsValid()) {
      source->reset(ProfileSource::Create(std::move(map)));
      return kProfileLoadSuccess;
    } else {
      return kProfileLoadBadData;
    }
  }
}

ProfileCompilationInfo::ProfileLoadStatus ProfileCompilationInfo::ProfileSource::Read(
    uint8_t* buffer,
    size_t byte_count,
    const std::string& debug_stage,
    std::string* error) {
  if (IsMemMap()) {
    if (mem_map_cur_ + byte_count > mem_map_.Size()) {
      return kProfileLoadBadData;
    }
    for (size_t i = 0; i < byte_count; i++) {
      buffer[i] = *(mem_map_.Begin() + mem_map_cur_);
      mem_map_cur_++;
    }
  } else {
    while (byte_count > 0) {
      int bytes_read = TEMP_FAILURE_RETRY(read(fd_, buffer, byte_count));
      if (bytes_read == 0) {
        *error += "Profile EOF reached prematurely for " + debug_stage;
        return kProfileLoadBadData;
      } else if (bytes_read < 0) {
        *error += "Profile IO error for " + debug_stage + strerror(errno);
        return kProfileLoadIOError;
      }
      byte_count -= bytes_read;
      buffer += bytes_read;
    }
  }
  return kProfileLoadSuccess;
}

bool ProfileCompilationInfo::ProfileSource::HasConsumedAllData() const {
  return IsMemMap()
      ? (!mem_map_.IsValid() || mem_map_cur_ == mem_map_.Size())
      : (testEOF(fd_) == 0);
}

bool ProfileCompilationInfo::ProfileSource::HasEmptyContent() const {
  if (IsMemMap()) {
    return !mem_map_.IsValid() || mem_map_.Size() == 0;
  } else {
    struct stat stat_buffer;
    if (fstat(fd_, &stat_buffer) != 0) {
      return false;
    }
    return stat_buffer.st_size == 0;
  }
}

// TODO(calin): fail fast if the dex checksums don't match.
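// Loads profile data from the given fd: opens the source (raw profile file or dex metadata
// archive), validates the header, inflates the compressed payload and then parses each
// profile line, skipping the lines rejected by filter_fn.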
1372 ProfileCompilationInfo::ProfileLoadStatus ProfileCompilationInfo::LoadInternal( 1373 int32_t fd, 1374 std::string* error, 1375 bool merge_classes, 1376 const ProfileLoadFilterFn& filter_fn) { 1377 ScopedTrace trace(__PRETTY_FUNCTION__); 1378 DCHECK_GE(fd, 0); 1379 1380 std::unique_ptr<ProfileSource> source; 1381 ProfileLoadStatus status = OpenSource(fd, &source, error); 1382 if (status != kProfileLoadSuccess) { 1383 return status; 1384 } 1385 1386 // We allow empty profile files. 1387 // Profiles may be created by ActivityManager or installd before we manage to 1388 // process them in the runtime or profman. 1389 if (source->HasEmptyContent()) { 1390 return kProfileLoadSuccess; 1391 } 1392 1393 // Read profile header: magic + version + number_of_dex_files. 1394 uint8_t number_of_dex_files; 1395 uint32_t uncompressed_data_size; 1396 uint32_t compressed_data_size; 1397 status = ReadProfileHeader(*source, 1398 &number_of_dex_files, 1399 &uncompressed_data_size, 1400 &compressed_data_size, 1401 error); 1402 1403 if (status != kProfileLoadSuccess) { 1404 return status; 1405 } 1406 // Allow large profiles for non target builds for the case where we are merging many profiles 1407 // to generate a boot image profile. 1408 if (kIsTargetBuild && uncompressed_data_size > kProfileSizeErrorThresholdInBytes) { 1409 LOG(ERROR) << "Profile data size exceeds " 1410 << std::to_string(kProfileSizeErrorThresholdInBytes) 1411 << " bytes"; 1412 return kProfileLoadBadData; 1413 } 1414 if (uncompressed_data_size > kProfileSizeWarningThresholdInBytes) { 1415 LOG(WARNING) << "Profile data size exceeds " 1416 << std::to_string(kProfileSizeWarningThresholdInBytes) 1417 << " bytes"; 1418 } 1419 1420 std::unique_ptr<uint8_t[]> compressed_data(new uint8_t[compressed_data_size]); 1421 status = source->Read(compressed_data.get(), compressed_data_size, "ReadContent", error); 1422 if (status != kProfileLoadSuccess) { 1423 *error += "Unable to read compressed profile data"; 1424 return status; 1425 } 1426 1427 if (!source->HasConsumedAllData()) { 1428 *error += "Unexpected data in the profile file."; 1429 return kProfileLoadBadData; 1430 } 1431 1432 SafeBuffer uncompressed_data(uncompressed_data_size); 1433 1434 int ret = InflateBuffer(compressed_data.get(), 1435 compressed_data_size, 1436 uncompressed_data_size, 1437 uncompressed_data.Get()); 1438 1439 if (ret != Z_STREAM_END) { 1440 *error += "Error reading uncompressed profile data"; 1441 return kProfileLoadBadData; 1442 } 1443 1444 std::vector<ProfileLineHeader> profile_line_headers; 1445 // Read profile line headers. 1446 for (uint8_t k = 0; k < number_of_dex_files; k++) { 1447 ProfileLineHeader line_header; 1448 1449 // First, read the line header to get the amount of data we need to read. 1450 status = ReadProfileLineHeader(uncompressed_data, &line_header, error); 1451 if (status != kProfileLoadSuccess) { 1452 return status; 1453 } 1454 profile_line_headers.push_back(line_header); 1455 } 1456 1457 SafeMap<uint8_t, uint8_t> dex_profile_index_remap; 1458 if (!RemapProfileIndex(profile_line_headers, filter_fn, &dex_profile_index_remap)) { 1459 return kProfileLoadBadData; 1460 } 1461 1462 for (uint8_t k = 0; k < number_of_dex_files; k++) { 1463 if (!filter_fn(profile_line_headers[k].dex_location, profile_line_headers[k].checksum)) { 1464 // We have to skip the line. Advanced the current pointer of the buffer. 
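      // The amount to skip is the encoded class ids, the method region and the method bitmap.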
1465 size_t profile_line_size = 1466 profile_line_headers[k].class_set_size * sizeof(uint16_t) + 1467 profile_line_headers[k].method_region_size_bytes + 1468 DexFileData::ComputeBitmapStorage(profile_line_headers[k].num_method_ids); 1469 uncompressed_data.Advance(profile_line_size); 1470 } else { 1471 // Now read the actual profile line. 1472 status = ReadProfileLine(uncompressed_data, 1473 number_of_dex_files, 1474 profile_line_headers[k], 1475 dex_profile_index_remap, 1476 merge_classes, 1477 error); 1478 if (status != kProfileLoadSuccess) { 1479 return status; 1480 } 1481 } 1482 } 1483 1484 if (StoresAggregationCounters()) { 1485 if (!uncompressed_data.ReadUintAndAdvance<uint16_t>(&aggregation_count_)) { 1486 *error = "Cannot read the global aggregation count"; 1487 return kProfileLoadBadData; 1488 } 1489 } 1490 1491 // Check that we read everything and that profiles don't contain junk data. 1492 if (uncompressed_data.CountUnreadBytes() > 0) { 1493 *error = "Unexpected content in the profile file: " + 1494 std::to_string(uncompressed_data.CountUnreadBytes()) + " extra bytes"; 1495 return kProfileLoadBadData; 1496 } else { 1497 return kProfileLoadSuccess; 1498 } 1499 } 1500 1501 bool ProfileCompilationInfo::RemapProfileIndex( 1502 const std::vector<ProfileLineHeader>& profile_line_headers, 1503 const ProfileLoadFilterFn& filter_fn, 1504 /*out*/SafeMap<uint8_t, uint8_t>* dex_profile_index_remap) { 1505 // First verify that all checksums match. This will avoid adding garbage to 1506 // the current profile info. 1507 // Note that the number of elements should be very small, so this should not 1508 // be a performance issue. 1509 for (const ProfileLineHeader& other_profile_line_header : profile_line_headers) { 1510 if (!filter_fn(other_profile_line_header.dex_location, other_profile_line_header.checksum)) { 1511 continue; 1512 } 1513 // verify_checksum is false because we want to differentiate between a missing dex data and 1514 // a mismatched checksum. 1515 const DexFileData* dex_data = FindDexData(other_profile_line_header.dex_location, 1516 /* checksum= */ 0u, 1517 /* verify_checksum= */ false); 1518 if ((dex_data != nullptr) && (dex_data->checksum != other_profile_line_header.checksum)) { 1519 LOG(WARNING) << "Checksum mismatch for dex " << other_profile_line_header.dex_location; 1520 return false; 1521 } 1522 } 1523 // All checksums match. Import the data. 1524 uint32_t num_dex_files = static_cast<uint32_t>(profile_line_headers.size()); 1525 for (uint32_t i = 0; i < num_dex_files; i++) { 1526 if (!filter_fn(profile_line_headers[i].dex_location, profile_line_headers[i].checksum)) { 1527 continue; 1528 } 1529 const DexFileData* dex_data = GetOrAddDexFileData(profile_line_headers[i].dex_location, 1530 profile_line_headers[i].checksum, 1531 profile_line_headers[i].num_method_ids); 1532 if (dex_data == nullptr) { 1533 return false; // Could happen if we exceed the number of allowed dex files. 
1534     }
1535     dex_profile_index_remap->Put(i, dex_data->profile_index);
1536   }
1537   return true;
1538 }
1539
1540 std::unique_ptr<uint8_t[]> ProfileCompilationInfo::DeflateBuffer(const uint8_t* in_buffer,
1541                                                                  uint32_t in_size,
1542                                                                  uint32_t* compressed_data_size) {
1543   z_stream strm;
1544   strm.zalloc = Z_NULL;
1545   strm.zfree = Z_NULL;
1546   strm.opaque = Z_NULL;
1547   int ret = deflateInit(&strm, 1);
1548   if (ret != Z_OK) {
1549     return nullptr;
1550   }
1551
1552   uint32_t out_size = deflateBound(&strm, in_size);
1553
1554   std::unique_ptr<uint8_t[]> compressed_buffer(new uint8_t[out_size]);
1555   strm.avail_in = in_size;
1556   strm.next_in = const_cast<uint8_t*>(in_buffer);
1557   strm.avail_out = out_size;
1558   strm.next_out = &compressed_buffer[0];
1559   ret = deflate(&strm, Z_FINISH);
1560   if (ret == Z_STREAM_ERROR) {
1561     return nullptr;
1562   }
1563   *compressed_data_size = out_size - strm.avail_out;
1564   deflateEnd(&strm);
1565   return compressed_buffer;
1566 }
1567
1568 int ProfileCompilationInfo::InflateBuffer(const uint8_t* in_buffer,
1569                                           uint32_t in_size,
1570                                           uint32_t expected_uncompressed_data_size,
1571                                           uint8_t* out_buffer) {
1572   z_stream strm;
1573
1574   /* allocate inflate state */
1575   strm.zalloc = Z_NULL;
1576   strm.zfree = Z_NULL;
1577   strm.opaque = Z_NULL;
1578   strm.avail_in = in_size;
1579   strm.next_in = const_cast<uint8_t*>(in_buffer);
1580   strm.avail_out = expected_uncompressed_data_size;
1581   strm.next_out = out_buffer;
1582
1583   int ret;
1584   inflateInit(&strm);
1585   ret = inflate(&strm, Z_NO_FLUSH);
1586
1587   if (strm.avail_in != 0 || strm.avail_out != 0) {
1588     return Z_DATA_ERROR;
1589   }
1590   inflateEnd(&strm);
1591   return ret;
1592 }
1593
1594 bool ProfileCompilationInfo::MergeWith(const ProfileCompilationInfo& other,
1595                                        bool merge_classes) {
1596   // First verify that all checksums match. This will avoid adding garbage to
1597   // the current profile info.
1598   // Note that the number of elements should be very small, so this should not
1599   // be a performance issue.
1600   for (const DexFileData* other_dex_data : other.info_) {
1601     // verify_checksum is false because we want to differentiate between missing dex data and
1602     // a mismatched checksum.
1603     const DexFileData* dex_data = FindDexData(other_dex_data->profile_key,
1604                                               /* checksum= */ 0u,
1605                                               /* verify_checksum= */ false);
1606     if ((dex_data != nullptr) && (dex_data->checksum != other_dex_data->checksum)) {
1607       LOG(WARNING) << "Checksum mismatch for dex " << other_dex_data->profile_key;
1608       return false;
1609     }
1610   }
1611   // All checksums match. Import the data.
1612
1613   // The other profile might have a different indexing of dex files.
1614   // That is because each dex file gets a 'dex_profile_index' on a first-come-first-served basis.
1615   // That means that the order in which the methods are added to the profile matters for the
1616   // actual indices.
1617   // The reason we cannot rely on the actual multidex index is that a single profile may store
1618   // data from multiple splits. This means that a profile may contain a classes2.dex from split-A
1619   // and one from split-B.
1620
1621   // First, build a mapping from other_dex_profile_index to this_dex_profile_index.
1622   // This will make sure that the ClassReferences will point to the correct dex file.
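  // Sketch of the remapping (dex locations and indices below are hypothetical):
  // if "split_b.apk!classes2.dex" was assigned profile_index 0 in `other` but
  // profile_index 4 in this profile, the map records 0 -> 4 so that every
  // ClassReference copied from `other` is rewritten to this profile's index
  // for the same dex file before it is inserted into an inline cache.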
1623   SafeMap<uint8_t, uint8_t> dex_profile_index_remap;
1624   for (const DexFileData* other_dex_data : other.info_) {
1625     const DexFileData* dex_data = GetOrAddDexFileData(other_dex_data->profile_key,
1626                                                       other_dex_data->checksum,
1627                                                       other_dex_data->num_method_ids);
1628     if (dex_data == nullptr) {
1629       return false;  // Could happen if we exceed the number of allowed dex files.
1630     }
1631     dex_profile_index_remap.Put(other_dex_data->profile_index, dex_data->profile_index);
1632   }
1633
1634   // Merge the actual profile data.
1635   for (const DexFileData* other_dex_data : other.info_) {
1636     DexFileData* dex_data = const_cast<DexFileData*>(FindDexData(other_dex_data->profile_key,
1637                                                                  other_dex_data->checksum));
1638     DCHECK(dex_data != nullptr);
1639
1640     // Merge counters for methods and classes. Must be done before we merge the bitmaps so that
1641     // we can tell if the data is new or not.
1642     if (StoresAggregationCounters()) {
1643       // Class aggregation counters.
1644       if (merge_classes) {
1645         for (const dex::TypeIndex& type_idx : other_dex_data->class_set) {
1646           uint16_t amount = other.StoresAggregationCounters()
1647               ? other_dex_data->class_counters[type_idx.index_]
1648               : (dex_data->ContainsClass(type_idx) ? 1 : 0);
1649
1650           dex_data->class_counters[type_idx.index_] =
1651               IncrementAggregationCounter(dex_data->class_counters[type_idx.index_], amount);
1652         }
1653       }
1654
1655       // Method aggregation counters.
1656       for (uint16_t method_idx = 0; method_idx < other_dex_data->num_method_ids; method_idx++) {
1657         if (other_dex_data->GetHotnessInfo(method_idx).IsInProfile()) {
1658           uint16_t amount = other.StoresAggregationCounters()
1659               ? other_dex_data->method_counters[method_idx]
1660               : (dex_data->GetHotnessInfo(method_idx).IsInProfile() ? 1 : 0);
1661           dex_data->method_counters[method_idx] =
1662               IncrementAggregationCounter(dex_data->method_counters[method_idx], amount);
1663         }
1664       }
1665     }
1666
1667     // Merge the classes.
1668     if (merge_classes) {
1669       dex_data->class_set.insert(other_dex_data->class_set.begin(),
1670                                  other_dex_data->class_set.end());
1671     }
1672
1673     // Merge the methods and the inline caches.
1674     for (const auto& other_method_it : other_dex_data->method_map) {
1675       uint16_t other_method_index = other_method_it.first;
1676       InlineCacheMap* inline_cache = dex_data->FindOrAddMethod(other_method_index);
1677       if (inline_cache == nullptr) {
1678         return false;
1679       }
1680       const auto& other_inline_cache = other_method_it.second;
1681       for (const auto& other_ic_it : other_inline_cache) {
1682         uint16_t other_dex_pc = other_ic_it.first;
1683         const ClassSet& other_class_set = other_ic_it.second.classes;
1684         DexPcData* dex_pc_data = FindOrAddDexPc(inline_cache, other_dex_pc);
1685         if (other_ic_it.second.is_missing_types) {
1686           dex_pc_data->SetIsMissingTypes();
1687         } else if (other_ic_it.second.is_megamorphic) {
1688           dex_pc_data->SetIsMegamorphic();
1689         } else {
1690           for (const auto& class_it : other_class_set) {
1691             dex_pc_data->AddClass(dex_profile_index_remap.Get(
1692                 class_it.dex_profile_index), class_it.type_index);
1693           }
1694         }
1695       }
1696     }
1697
1698     // Merge the method bitmaps.
1699     dex_data->MergeBitmap(*other_dex_data);
1700   }
1701
1702   // Global aggregation counter.
1703   if (StoresAggregationCounters()) {
1704     uint16_t amount = other.StoresAggregationCounters() ?
other.aggregation_count_ : 1; 1705 aggregation_count_ = IncrementAggregationCounter(aggregation_count_, amount); 1706 } 1707 1708 return true; 1709 } 1710 1711 const ProfileCompilationInfo::DexFileData* ProfileCompilationInfo::FindDexData( 1712 const DexFile* dex_file) const { 1713 return FindDexData(GetProfileDexFileKey(dex_file->GetLocation()), 1714 dex_file->GetLocationChecksum()); 1715 } 1716 1717 ProfileCompilationInfo::MethodHotness ProfileCompilationInfo::GetMethodHotness( 1718 const MethodReference& method_ref) const { 1719 const DexFileData* dex_data = FindDexData(method_ref.dex_file); 1720 return dex_data != nullptr 1721 ? dex_data->GetHotnessInfo(method_ref.index) 1722 : MethodHotness(); 1723 } 1724 1725 bool ProfileCompilationInfo::AddMethodHotness(const MethodReference& method_ref, 1726 const MethodHotness& hotness) { 1727 DexFileData* dex_data = GetOrAddDexFileData(method_ref.dex_file); 1728 if (dex_data != nullptr) { 1729 // TODO: Add inline caches. 1730 return dex_data->AddMethod( 1731 static_cast<MethodHotness::Flag>(hotness.GetFlags()), method_ref.index); 1732 } 1733 return false; 1734 } 1735 1736 ProfileCompilationInfo::MethodHotness ProfileCompilationInfo::GetMethodHotness( 1737 const std::string& dex_location, 1738 uint32_t dex_checksum, 1739 uint16_t dex_method_index) const { 1740 const DexFileData* dex_data = FindDexData(GetProfileDexFileKey(dex_location), dex_checksum); 1741 return dex_data != nullptr ? dex_data->GetHotnessInfo(dex_method_index) : MethodHotness(); 1742 } 1743 1744 1745 std::unique_ptr<ProfileCompilationInfo::OfflineProfileMethodInfo> ProfileCompilationInfo::GetMethod( 1746 const std::string& dex_location, 1747 uint32_t dex_checksum, 1748 uint16_t dex_method_index) const { 1749 MethodHotness hotness(GetMethodHotness(dex_location, dex_checksum, dex_method_index)); 1750 if (!hotness.IsHot()) { 1751 return nullptr; 1752 } 1753 const InlineCacheMap* inline_caches = hotness.GetInlineCacheMap(); 1754 DCHECK(inline_caches != nullptr); 1755 std::unique_ptr<OfflineProfileMethodInfo> pmi(new OfflineProfileMethodInfo(inline_caches)); 1756 1757 pmi->dex_references.resize(info_.size()); 1758 for (const DexFileData* dex_data : info_) { 1759 pmi->dex_references[dex_data->profile_index].dex_location = dex_data->profile_key; 1760 pmi->dex_references[dex_data->profile_index].dex_checksum = dex_data->checksum; 1761 pmi->dex_references[dex_data->profile_index].num_method_ids = dex_data->num_method_ids; 1762 } 1763 1764 return pmi; 1765 } 1766 1767 1768 bool ProfileCompilationInfo::ContainsClass(const DexFile& dex_file, dex::TypeIndex type_idx) const { 1769 const DexFileData* dex_data = FindDexData(&dex_file); 1770 return (dex_data != nullptr) && dex_data->ContainsClass(type_idx); 1771 } 1772 1773 uint32_t ProfileCompilationInfo::GetNumberOfMethods() const { 1774 uint32_t total = 0; 1775 for (const DexFileData* dex_data : info_) { 1776 total += dex_data->method_map.size(); 1777 } 1778 return total; 1779 } 1780 1781 uint32_t ProfileCompilationInfo::GetNumberOfResolvedClasses() const { 1782 uint32_t total = 0; 1783 for (const DexFileData* dex_data : info_) { 1784 total += dex_data->class_set.size(); 1785 } 1786 return total; 1787 } 1788 1789 std::string ProfileCompilationInfo::DumpInfo(const std::vector<const DexFile*>& dex_files, 1790 bool print_full_dex_location) const { 1791 std::ostringstream os; 1792 if (info_.empty()) { 1793 return "ProfileInfo: empty"; 1794 } 1795 1796 os << "ProfileInfo:"; 1797 1798 const std::string kFirstDexFileKeySubstitute = "!classes.dex"; 
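  // The base dex file has an empty multidex suffix, so when full locations are
  // not printed we substitute a marker instead of printing nothing. For example
  // (locations are illustrative), "base.apk" would be shown as "!classes.dex"
  // while "base.apk!classes2.dex" would be shown as "!classes2.dex".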
1799 1800 for (const DexFileData* dex_data : info_) { 1801 os << "\n"; 1802 if (print_full_dex_location) { 1803 os << dex_data->profile_key; 1804 } else { 1805 // Replace the (empty) multidex suffix of the first key with a substitute for easier reading. 1806 std::string multidex_suffix = DexFileLoader::GetMultiDexSuffix(dex_data->profile_key); 1807 os << (multidex_suffix.empty() ? kFirstDexFileKeySubstitute : multidex_suffix); 1808 } 1809 os << " [index=" << static_cast<uint32_t>(dex_data->profile_index) << "]"; 1810 os << " [checksum=" << std::hex << dex_data->checksum << "]" << std::dec; 1811 const DexFile* dex_file = nullptr; 1812 for (const DexFile* current : dex_files) { 1813 if (dex_data->profile_key == current->GetLocation() && 1814 dex_data->checksum == current->GetLocationChecksum()) { 1815 dex_file = current; 1816 } 1817 } 1818 os << "\n\thot methods: "; 1819 for (const auto& method_it : dex_data->method_map) { 1820 if (dex_file != nullptr) { 1821 os << "\n\t\t" << dex_file->PrettyMethod(method_it.first, true); 1822 } else { 1823 os << method_it.first; 1824 } 1825 1826 os << "["; 1827 for (const auto& inline_cache_it : method_it.second) { 1828 os << "{" << std::hex << inline_cache_it.first << std::dec << ":"; 1829 if (inline_cache_it.second.is_missing_types) { 1830 os << "MT"; 1831 } else if (inline_cache_it.second.is_megamorphic) { 1832 os << "MM"; 1833 } else { 1834 for (const ClassReference& class_ref : inline_cache_it.second.classes) { 1835 os << "(" << static_cast<uint32_t>(class_ref.dex_profile_index) 1836 << "," << class_ref.type_index.index_ << ")"; 1837 } 1838 } 1839 os << "}"; 1840 } 1841 os << "], "; 1842 } 1843 bool startup = true; 1844 while (true) { 1845 os << "\n\t" << (startup ? "startup methods: " : "post startup methods: "); 1846 for (uint32_t method_idx = 0; method_idx < dex_data->num_method_ids; ++method_idx) { 1847 MethodHotness hotness_info(dex_data->GetHotnessInfo(method_idx)); 1848 if (startup ? 
hotness_info.IsStartup() : hotness_info.IsPostStartup()) { 1849 if (dex_file != nullptr) { 1850 os << "\n\t\t" << dex_file->PrettyMethod(method_idx, true); 1851 } else { 1852 os << method_idx << ", "; 1853 } 1854 } 1855 } 1856 if (startup == false) { 1857 break; 1858 } 1859 startup = false; 1860 } 1861 os << "\n\tclasses: "; 1862 for (const auto class_it : dex_data->class_set) { 1863 if (dex_file != nullptr) { 1864 os << "\n\t\t" << dex_file->PrettyType(class_it); 1865 } else { 1866 os << class_it.index_ << ","; 1867 } 1868 } 1869 } 1870 return os.str(); 1871 } 1872 1873 bool ProfileCompilationInfo::GetClassesAndMethods( 1874 const DexFile& dex_file, 1875 /*out*/std::set<dex::TypeIndex>* class_set, 1876 /*out*/std::set<uint16_t>* hot_method_set, 1877 /*out*/std::set<uint16_t>* startup_method_set, 1878 /*out*/std::set<uint16_t>* post_startup_method_method_set) const { 1879 std::set<std::string> ret; 1880 const DexFileData* dex_data = FindDexData(&dex_file); 1881 if (dex_data == nullptr) { 1882 return false; 1883 } 1884 for (const auto& it : dex_data->method_map) { 1885 hot_method_set->insert(it.first); 1886 } 1887 for (uint32_t method_idx = 0; method_idx < dex_data->num_method_ids; ++method_idx) { 1888 MethodHotness hotness = dex_data->GetHotnessInfo(method_idx); 1889 if (hotness.IsStartup()) { 1890 startup_method_set->insert(method_idx); 1891 } 1892 if (hotness.IsPostStartup()) { 1893 post_startup_method_method_set->insert(method_idx); 1894 } 1895 } 1896 for (const dex::TypeIndex& type_index : dex_data->class_set) { 1897 class_set->insert(type_index); 1898 } 1899 return true; 1900 } 1901 1902 bool ProfileCompilationInfo::Equals(const ProfileCompilationInfo& other) { 1903 // No need to compare profile_key_map_. That's only a cache for fast search. 1904 // All the information is already in the info_ vector. 
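  // Equality therefore reduces to: the same profile version, the same number of
  // dex files, pairwise-equal DexFileData entries (compared in order), and the
  // same global aggregation count, as checked below.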
1905 if (memcmp(version_, other.version_, kProfileVersionSize) != 0) { 1906 return false; 1907 } 1908 if (info_.size() != other.info_.size()) { 1909 return false; 1910 } 1911 for (size_t i = 0; i < info_.size(); i++) { 1912 const DexFileData& dex_data = *info_[i]; 1913 const DexFileData& other_dex_data = *other.info_[i]; 1914 if (!(dex_data == other_dex_data)) { 1915 return false; 1916 } 1917 } 1918 if (aggregation_count_ != other.aggregation_count_) { 1919 return false; 1920 } 1921 return true; 1922 } 1923 1924 std::set<DexCacheResolvedClasses> ProfileCompilationInfo::GetResolvedClasses( 1925 const std::vector<const DexFile*>& dex_files) const { 1926 std::unordered_map<std::string, const DexFile* > key_to_dex_file; 1927 for (const DexFile* dex_file : dex_files) { 1928 key_to_dex_file.emplace(GetProfileDexFileKey(dex_file->GetLocation()), dex_file); 1929 } 1930 std::set<DexCacheResolvedClasses> ret; 1931 for (const DexFileData* dex_data : info_) { 1932 const auto it = key_to_dex_file.find(dex_data->profile_key); 1933 if (it != key_to_dex_file.end()) { 1934 const DexFile* dex_file = it->second; 1935 const std::string& dex_location = dex_file->GetLocation(); 1936 if (dex_data->checksum != it->second->GetLocationChecksum()) { 1937 LOG(ERROR) << "Dex checksum mismatch when getting resolved classes from profile for " 1938 << "location " << dex_location << " (checksum=" << dex_file->GetLocationChecksum() 1939 << ", profile checksum=" << dex_data->checksum; 1940 return std::set<DexCacheResolvedClasses>(); 1941 } 1942 DexCacheResolvedClasses classes(dex_location, 1943 dex_location, 1944 dex_data->checksum, 1945 dex_data->num_method_ids); 1946 classes.AddClasses(dex_data->class_set.begin(), dex_data->class_set.end()); 1947 ret.insert(classes); 1948 } 1949 } 1950 return ret; 1951 } 1952 1953 // Naive implementation to generate a random profile file suitable for testing. 1954 bool ProfileCompilationInfo::GenerateTestProfile(int fd, 1955 uint16_t number_of_dex_files, 1956 uint16_t method_percentage, 1957 uint16_t class_percentage, 1958 uint32_t random_seed) { 1959 const std::string base_dex_location = "base.apk"; 1960 ProfileCompilationInfo info; 1961 // The limits are defined by the dex specification. 1962 const uint16_t max_method = std::numeric_limits<uint16_t>::max(); 1963 const uint16_t max_classes = std::numeric_limits<uint16_t>::max(); 1964 uint16_t number_of_methods = max_method * method_percentage / 100; 1965 uint16_t number_of_classes = max_classes * class_percentage / 100; 1966 1967 std::srand(random_seed); 1968 1969 // Make sure we generate more samples with a low index value. 1970 // This makes it more likely to hit valid method/class indices in small apps. 1971 const uint16_t kFavorFirstN = 10000; 1972 const uint16_t kFavorSplit = 2; 1973 1974 for (uint16_t i = 0; i < number_of_dex_files; i++) { 1975 std::string dex_location = DexFileLoader::GetMultiDexLocation(i, base_dex_location.c_str()); 1976 std::string profile_key = GetProfileDexFileKey(dex_location); 1977 1978 for (uint16_t m = 0; m < number_of_methods; m++) { 1979 uint16_t method_idx = rand() % max_method; 1980 if (m < (number_of_methods / kFavorSplit)) { 1981 method_idx %= kFavorFirstN; 1982 } 1983 // Alternate between startup and post startup. 1984 uint32_t flags = MethodHotness::kFlagHot; 1985 flags |= ((m & 1) != 0) ? 
          MethodHotness::kFlagPostStartup : MethodHotness::kFlagStartup;
1986       info.AddMethodIndex(static_cast<MethodHotness::Flag>(flags),
1987                           profile_key,
1988                           /*checksum=*/ 0,
1989                           method_idx,
1990                           max_method);
1991     }
1992
1993     for (uint16_t c = 0; c < number_of_classes; c++) {
1994       uint16_t type_idx = rand() % max_classes;
1995       if (c < (number_of_classes / kFavorSplit)) {
1996         type_idx %= kFavorFirstN;
1997       }
1998       info.AddClassIndex(profile_key, 0, dex::TypeIndex(type_idx), max_method);
1999     }
2000   }
2001   return info.Save(fd);
2002 }
2003
2004 // Naive implementation to generate a random profile file suitable for testing.
2005 // Description of random selection:
2006 // * Select a random starting point S.
2007 // * For every index i, add (S+i) % (N - total number of methods/classes) to the profile with a
2008 //   probability of 1/(N - i - number of methods/classes needed to add to the profile).
2009 bool ProfileCompilationInfo::GenerateTestProfile(
2010     int fd,
2011     std::vector<std::unique_ptr<const DexFile>>& dex_files,
2012     uint16_t method_percentage,
2013     uint16_t class_percentage,
2014     uint32_t random_seed) {
2015   ProfileCompilationInfo info;
2016   std::default_random_engine rng(random_seed);
2017   auto create_shuffled_range = [&rng](uint32_t take, uint32_t out_of) {
2018     CHECK_LE(take, out_of);
2019     std::vector<uint32_t> vec(out_of);
2020     std::iota(vec.begin(), vec.end(), 0u);
2021     std::shuffle(vec.begin(), vec.end(), rng);
2022     vec.erase(vec.begin() + take, vec.end());
2023     std::sort(vec.begin(), vec.end());
2024     return vec;
2025   };
2026   for (std::unique_ptr<const DexFile>& dex_file : dex_files) {
2027     const std::string& location = dex_file->GetLocation();
2028     uint32_t checksum = dex_file->GetLocationChecksum();
2029
2030     uint32_t number_of_classes = dex_file->NumClassDefs();
2031     uint32_t classes_required_in_profile = (number_of_classes * class_percentage) / 100;
2032     for (uint32_t class_index : create_shuffled_range(classes_required_in_profile,
2033                                                       number_of_classes)) {
2034       info.AddClassIndex(location,
2035                          checksum,
2036                          dex_file->GetClassDef(class_index).class_idx_,
2037                          dex_file->NumMethodIds());
2038     }
2039
2040     uint32_t number_of_methods = dex_file->NumMethodIds();
2041     uint32_t methods_required_in_profile = (number_of_methods * method_percentage) / 100;
2042     for (uint32_t method_index : create_shuffled_range(methods_required_in_profile,
2043                                                        number_of_methods)) {
2044       // Alternate between startup and post startup.
2045       uint32_t flags = MethodHotness::kFlagHot;
2046       flags |= ((method_index & 1) != 0)
2047           ? MethodHotness::kFlagPostStartup
2048           : MethodHotness::kFlagStartup;
2049       info.AddMethodIndex(static_cast<MethodHotness::Flag>(flags),
2050                           MethodReference(dex_file.get(), method_index));
2051     }
2052   }
2053   return info.Save(fd);
2054 }
2055
2056 bool ProfileCompilationInfo::OfflineProfileMethodInfo::operator==(
2057     const OfflineProfileMethodInfo& other) const {
2058   if (inline_caches->size() != other.inline_caches->size()) {
2059     return false;
2060   }
2061
2062   // We can't use a simple equality test because we need to match the dex files
2063   // of the inline caches which might have different profile indexes.
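  // Matching is done through the referenced dex files rather than the raw
  // dex_profile_index values: for each class in an inline cache we look for a
  // class in the other cache whose type index matches and whose index resolves
  // (via dex_references) to an equal DexReference.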
2064 for (const auto& inline_cache_it : *inline_caches) { 2065 uint16_t dex_pc = inline_cache_it.first; 2066 const DexPcData dex_pc_data = inline_cache_it.second; 2067 const auto& other_it = other.inline_caches->find(dex_pc); 2068 if (other_it == other.inline_caches->end()) { 2069 return false; 2070 } 2071 const DexPcData& other_dex_pc_data = other_it->second; 2072 if (dex_pc_data.is_megamorphic != other_dex_pc_data.is_megamorphic || 2073 dex_pc_data.is_missing_types != other_dex_pc_data.is_missing_types) { 2074 return false; 2075 } 2076 for (const ClassReference& class_ref : dex_pc_data.classes) { 2077 bool found = false; 2078 for (const ClassReference& other_class_ref : other_dex_pc_data.classes) { 2079 CHECK_LE(class_ref.dex_profile_index, dex_references.size()); 2080 CHECK_LE(other_class_ref.dex_profile_index, other.dex_references.size()); 2081 const DexReference& dex_ref = dex_references[class_ref.dex_profile_index]; 2082 const DexReference& other_dex_ref = other.dex_references[other_class_ref.dex_profile_index]; 2083 if (class_ref.type_index == other_class_ref.type_index && 2084 dex_ref == other_dex_ref) { 2085 found = true; 2086 break; 2087 } 2088 } 2089 if (!found) { 2090 return false; 2091 } 2092 } 2093 } 2094 return true; 2095 } 2096 2097 bool ProfileCompilationInfo::IsEmpty() const { 2098 DCHECK_EQ(info_.empty(), profile_key_map_.empty()); 2099 return info_.empty(); 2100 } 2101 2102 ProfileCompilationInfo::InlineCacheMap* 2103 ProfileCompilationInfo::DexFileData::FindOrAddMethod(uint16_t method_index) { 2104 if (method_index >= num_method_ids) { 2105 LOG(ERROR) << "Invalid method index " << method_index << ". num_method_ids=" << num_method_ids; 2106 return nullptr; 2107 } 2108 return &(method_map.FindOrAdd( 2109 method_index, 2110 InlineCacheMap(std::less<uint16_t>(), allocator_->Adapter(kArenaAllocProfile)))->second); 2111 } 2112 2113 // Mark a method as executed at least once. 2114 bool ProfileCompilationInfo::DexFileData::AddMethod(MethodHotness::Flag flags, size_t index) { 2115 if (index >= num_method_ids) { 2116 LOG(ERROR) << "Invalid method index " << index << ". 
num_method_ids=" << num_method_ids; 2117 return false; 2118 } 2119 2120 SetMethodHotness(index, flags); 2121 2122 if ((flags & MethodHotness::kFlagHot) != 0) { 2123 ProfileCompilationInfo::InlineCacheMap* result = FindOrAddMethod(index); 2124 DCHECK(result != nullptr); 2125 } 2126 return true; 2127 } 2128 2129 void ProfileCompilationInfo::DexFileData::SetMethodHotness(size_t index, 2130 MethodHotness::Flag flags) { 2131 DCHECK_LT(index, num_method_ids); 2132 if ((flags & MethodHotness::kFlagStartup) != 0) { 2133 method_bitmap.StoreBit(MethodBitIndex(/*startup=*/ true, index), /*value=*/ true); 2134 } 2135 if ((flags & MethodHotness::kFlagPostStartup) != 0) { 2136 method_bitmap.StoreBit(MethodBitIndex(/*startup=*/ false, index), /*value=*/ true); 2137 } 2138 } 2139 2140 ProfileCompilationInfo::MethodHotness ProfileCompilationInfo::DexFileData::GetHotnessInfo( 2141 uint32_t dex_method_index) const { 2142 MethodHotness ret; 2143 if (method_bitmap.LoadBit(MethodBitIndex(/*startup=*/ true, dex_method_index))) { 2144 ret.AddFlag(MethodHotness::kFlagStartup); 2145 } 2146 if (method_bitmap.LoadBit(MethodBitIndex(/*startup=*/ false, dex_method_index))) { 2147 ret.AddFlag(MethodHotness::kFlagPostStartup); 2148 } 2149 auto it = method_map.find(dex_method_index); 2150 if (it != method_map.end()) { 2151 ret.SetInlineCacheMap(&it->second); 2152 ret.AddFlag(MethodHotness::kFlagHot); 2153 } 2154 return ret; 2155 } 2156 2157 int32_t ProfileCompilationInfo::DexFileData::GetMethodAggregationCounter( 2158 uint16_t method_idx) const { 2159 CHECK_GT(method_counters.size(), method_idx) << "Profile not prepared for aggregation counters"; 2160 if (!GetHotnessInfo(method_idx).IsInProfile()) { 2161 return -1; 2162 } 2163 2164 return method_counters[method_idx]; 2165 } 2166 2167 int32_t ProfileCompilationInfo::DexFileData::GetClassAggregationCounter(uint16_t type_idx) const { 2168 CHECK_GT(class_counters.size(), type_idx) << "Profile not prepared for aggregation counters"; 2169 if (!ContainsClass(dex::TypeIndex(type_idx))) { 2170 return -1; 2171 } 2172 2173 return class_counters[type_idx]; 2174 } 2175 2176 int32_t ProfileCompilationInfo::GetMethodAggregationCounter( 2177 const MethodReference& method_ref) const { 2178 CHECK(StoresAggregationCounters()) << "Profile not prepared for aggregation counters"; 2179 const DexFileData* dex_data = FindDexData(method_ref.dex_file); 2180 return dex_data == nullptr ? -1 : dex_data->GetMethodAggregationCounter(method_ref.index); 2181 } 2182 2183 int32_t ProfileCompilationInfo::GetClassAggregationCounter(const TypeReference& type_ref) const { 2184 CHECK(StoresAggregationCounters()) << "Profile not prepared for aggregation counters"; 2185 const DexFileData* dex_data = FindDexData(type_ref.dex_file); 2186 return dex_data == nullptr ? 
      -1 : dex_data->GetClassAggregationCounter(type_ref.index);
2187 }
2188
2189 uint16_t ProfileCompilationInfo::GetAggregationCounter() const {
2190   CHECK(StoresAggregationCounters()) << "Profile not prepared for aggregation counters";
2191   return aggregation_count_;
2192 }
2193
2194 ProfileCompilationInfo::DexPcData*
2195 ProfileCompilationInfo::FindOrAddDexPc(InlineCacheMap* inline_cache, uint32_t dex_pc) {
2196   return &(inline_cache->FindOrAdd(dex_pc, DexPcData(&allocator_))->second);
2197 }
2198
2199 HashSet<std::string> ProfileCompilationInfo::GetClassDescriptors(
2200     const std::vector<const DexFile*>& dex_files) {
2201   HashSet<std::string> ret;
2202   for (const DexFile* dex_file : dex_files) {
2203     const DexFileData* data = FindDexData(dex_file);
2204     if (data != nullptr) {
2205       for (dex::TypeIndex type_idx : data->class_set) {
2206         if (!dex_file->IsTypeIndexValid(type_idx)) {
2207           // Something went bad. The profile is probably corrupted. Abort and return an empty set.
2208           LOG(WARNING) << "Corrupted profile: invalid type index "
2209                        << type_idx.index_ << " in dex " << dex_file->GetLocation();
2210           return HashSet<std::string>();
2211         }
2212         const dex::TypeId& type_id = dex_file->GetTypeId(type_idx);
2213         ret.insert(dex_file->GetTypeDescriptor(type_id));
2214       }
2215     } else {
2216       VLOG(compiler) << "Failed to find profile data for " << dex_file->GetLocation();
2217     }
2218   }
2219   return ret;
2220 }
2221
2222 bool ProfileCompilationInfo::IsProfileFile(int fd) {
2223   // First check if it's an empty file as we allow empty profile files.
2224   // Profiles may be created by ActivityManager or installd before we manage to
2225   // process them in the runtime or profman.
2226   struct stat stat_buffer;
2227   if (fstat(fd, &stat_buffer) != 0) {
2228     return false;
2229   }
2230
2231   if (stat_buffer.st_size == 0) {
2232     return true;
2233   }
2234
2235   // The file is not empty. Check if it contains the profile magic.
2236   size_t byte_count = sizeof(kProfileMagic);
2237   uint8_t buffer[sizeof(kProfileMagic)];
2238   if (!android::base::ReadFully(fd, buffer, byte_count)) {
2239     return false;
2240   }
2241
2242   // Reset the offset to prepare the file for reading.
2243   off_t rc = TEMP_FAILURE_RETRY(lseek(fd, 0, SEEK_SET));
2244   if (rc == static_cast<off_t>(-1)) {
2245     PLOG(ERROR) << "Failed to reset the offset";
2246     return false;
2247   }
2248
2249   return memcmp(buffer, kProfileMagic, byte_count) == 0;
2250 }
2251
2252 bool ProfileCompilationInfo::UpdateProfileKeys(
2253     const std::vector<std::unique_ptr<const DexFile>>& dex_files) {
2254   for (const std::unique_ptr<const DexFile>& dex_file : dex_files) {
2255     for (DexFileData* dex_data : info_) {
2256       if (dex_data->checksum == dex_file->GetLocationChecksum()
2257           && dex_data->num_method_ids == dex_file->NumMethodIds()) {
2258         std::string new_profile_key = GetProfileDexFileKey(dex_file->GetLocation());
2259         if (dex_data->profile_key != new_profile_key) {
2260           if (profile_key_map_.find(new_profile_key) != profile_key_map_.end()) {
2261             // We can't update the key if the new key belongs to a different dex file.
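            // Hypothetical example: two dex files that share a checksum and
            // method count (e.g. identical copies in different splits) can both
            // match this profile entry; renaming the entry to new_profile_key
            // would then collide with the entry already registered under that
            // key, so we give up rather than corrupt profile_key_map_.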
2262 LOG(ERROR) << "Cannot update profile key to " << new_profile_key 2263 << " because the new key belongs to another dex file."; 2264 return false; 2265 } 2266 profile_key_map_.erase(dex_data->profile_key); 2267 profile_key_map_.Put(new_profile_key, dex_data->profile_index); 2268 dex_data->profile_key = new_profile_key; 2269 } 2270 } 2271 } 2272 } 2273 return true; 2274 } 2275 2276 bool ProfileCompilationInfo::ProfileFilterFnAcceptAll( 2277 const std::string& dex_location ATTRIBUTE_UNUSED, 2278 uint32_t checksum ATTRIBUTE_UNUSED) { 2279 return true; 2280 } 2281 2282 void ProfileCompilationInfo::ClearData() { 2283 for (DexFileData* data : info_) { 2284 delete data; 2285 } 2286 info_.clear(); 2287 profile_key_map_.clear(); 2288 } 2289 2290 bool ProfileCompilationInfo::StoresAggregationCounters() const { 2291 return memcmp(version_, kProfileVersionWithCounters, sizeof(kProfileVersionWithCounters)) == 0; 2292 } 2293 2294 void ProfileCompilationInfo::PrepareForAggregationCounters() { 2295 InitProfileVersionInternal(kProfileVersionWithCounters); 2296 for (DexFileData* dex_data : info_) { 2297 dex_data->PrepareForAggregationCounters(); 2298 } 2299 } 2300 2301 void ProfileCompilationInfo::DexFileData::PrepareForAggregationCounters() { 2302 method_counters.resize(num_method_ids); 2303 // TODO(calin): we should store the maximum number of types in the profile. 2304 // It will simplify quite a few things and make this storage allocation 2305 // more efficient. 2306 size_t max_elems = 1 << (kBitsPerByte * sizeof(uint16_t)); 2307 class_counters.resize(max_elems); 2308 } 2309 2310 const uint8_t* ProfileCompilationInfo::GetVersion() const { 2311 return version_; 2312 } 2313 2314 void ProfileCompilationInfo::InitProfileVersionInternal(const uint8_t version[]) { 2315 CHECK( 2316 (memcmp(version, kProfileVersion, kProfileVersionSize) == 0) || 2317 (memcmp(version, kProfileVersionWithCounters, kProfileVersionSize) == 0)); 2318 memcpy(version_, version, kProfileVersionSize); 2319 } 2320 2321 uint16_t ProfileCompilationInfo::DexFileData::GetNumMethodCounters() const { 2322 uint16_t num_method_counters = 0; 2323 for (uint16_t method_idx = 0; method_idx < num_method_ids; method_idx++) { 2324 num_method_counters += GetHotnessInfo(method_idx).IsInProfile() ? 1 : 0; 2325 } 2326 return num_method_counters; 2327 } 2328 2329 bool ProfileCompilationInfo::DexFileData::ContainsClass(const dex::TypeIndex type_index) const { 2330 return class_set.find(type_index) != class_set.end(); 2331 } 2332 } // namespace art 2333