1 /*
2 * Copyright (C) 2008 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 /*
18 * Preparation and completion of hprof data generation. The output is
19 * written into two files and then combined. This is necessary because
20 * we generate some of the data (strings and classes) while we dump the
21 * heap, and some analysis tools require that the class and string data
22 * appear first.
23 */
24
25 #include "hprof.h"
26
27 #include <errno.h>
28 #include <fcntl.h>
29 #include <stdio.h>
30 #include <string.h>
31 #include <sys/time.h>
32 #include <sys/uio.h>
33 #include <time.h>
34 #include <unistd.h>
35
36 #include <set>
37
38 #include <android-base/logging.h>
39 #include <android-base/stringprintf.h>
40
41 #include "art_field-inl.h"
42 #include "art_method-inl.h"
43 #include "base/array_ref.h"
44 #include "base/file_utils.h"
45 #include "base/logging.h"
46 #include "base/macros.h"
47 #include "base/mutex.h"
48 #include "base/os.h"
49 #include "base/safe_map.h"
50 #include "base/time_utils.h"
51 #include "base/unix_file/fd_file.h"
52 #include "class_linker.h"
53 #include "class_root.h"
54 #include "common_throws.h"
55 #include "debugger.h"
56 #include "dex/dex_file-inl.h"
57 #include "gc/accounting/heap_bitmap.h"
58 #include "gc/allocation_record.h"
59 #include "gc/heap-visit-objects-inl.h"
60 #include "gc/heap.h"
61 #include "gc/scoped_gc_critical_section.h"
62 #include "gc/space/space.h"
63 #include "gc_root.h"
64 #include "mirror/class-inl.h"
65 #include "mirror/class.h"
66 #include "mirror/object-refvisitor-inl.h"
67 #include "runtime_globals.h"
68 #include "scoped_thread_state_change-inl.h"
69 #include "thread_list.h"
70
71 namespace art {
72
73 namespace hprof {
74
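// If true, stream the dump straight to DDMS from a single pass (DumpToDdmsDirect); the buffered
// two-step path (DumpToDdmsBuffered) is currently unimplemented.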
75 static constexpr bool kDirectStream = true;
76
77 static constexpr uint32_t kHprofTime = 0;
78 static constexpr uint32_t kHprofNullThread = 0;
79
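// A heap dump is split into HEAP_DUMP_SEGMENT records. A new segment is started once either of
// these limits is hit (see CheckHeapSegmentConstraints).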
80 static constexpr size_t kMaxObjectsPerSegment = 128;
81 static constexpr size_t kMaxBytesPerSegment = 4096;
82
// Name of the synthetic static field that points at the object generated to account for class
// static overhead.
84 static constexpr const char* kClassOverheadName = "$classOverhead";
85
86 enum HprofTag {
87 HPROF_TAG_STRING = 0x01,
88 HPROF_TAG_LOAD_CLASS = 0x02,
89 HPROF_TAG_UNLOAD_CLASS = 0x03,
90 HPROF_TAG_STACK_FRAME = 0x04,
91 HPROF_TAG_STACK_TRACE = 0x05,
92 HPROF_TAG_ALLOC_SITES = 0x06,
93 HPROF_TAG_HEAP_SUMMARY = 0x07,
94 HPROF_TAG_START_THREAD = 0x0A,
95 HPROF_TAG_END_THREAD = 0x0B,
96 HPROF_TAG_HEAP_DUMP = 0x0C,
97 HPROF_TAG_HEAP_DUMP_SEGMENT = 0x1C,
98 HPROF_TAG_HEAP_DUMP_END = 0x2C,
99 HPROF_TAG_CPU_SAMPLES = 0x0D,
100 HPROF_TAG_CONTROL_SETTINGS = 0x0E,
101 };
102
103 // Values for the first byte of HEAP_DUMP and HEAP_DUMP_SEGMENT records:
104 enum HprofHeapTag {
105 // Traditional.
106 HPROF_ROOT_UNKNOWN = 0xFF,
107 HPROF_ROOT_JNI_GLOBAL = 0x01,
108 HPROF_ROOT_JNI_LOCAL = 0x02,
109 HPROF_ROOT_JAVA_FRAME = 0x03,
110 HPROF_ROOT_NATIVE_STACK = 0x04,
111 HPROF_ROOT_STICKY_CLASS = 0x05,
112 HPROF_ROOT_THREAD_BLOCK = 0x06,
113 HPROF_ROOT_MONITOR_USED = 0x07,
114 HPROF_ROOT_THREAD_OBJECT = 0x08,
115 HPROF_CLASS_DUMP = 0x20,
116 HPROF_INSTANCE_DUMP = 0x21,
117 HPROF_OBJECT_ARRAY_DUMP = 0x22,
118 HPROF_PRIMITIVE_ARRAY_DUMP = 0x23,
119
120 // Android.
121 HPROF_HEAP_DUMP_INFO = 0xfe,
122 HPROF_ROOT_INTERNED_STRING = 0x89,
123 HPROF_ROOT_FINALIZING = 0x8a, // Obsolete.
124 HPROF_ROOT_DEBUGGER = 0x8b,
125 HPROF_ROOT_REFERENCE_CLEANUP = 0x8c, // Obsolete.
126 HPROF_ROOT_VM_INTERNAL = 0x8d,
127 HPROF_ROOT_JNI_MONITOR = 0x8e,
128 HPROF_UNREACHABLE = 0x90, // Obsolete.
129 HPROF_PRIMITIVE_ARRAY_NODATA_DUMP = 0xc3, // Obsolete.
130 };
131
132 enum HprofHeapId {
133 HPROF_HEAP_DEFAULT = 0,
134 HPROF_HEAP_ZYGOTE = 'Z',
135 HPROF_HEAP_APP = 'A',
136 HPROF_HEAP_IMAGE = 'I',
137 };
138
139 enum HprofBasicType {
140 hprof_basic_object = 2,
141 hprof_basic_boolean = 4,
142 hprof_basic_char = 5,
143 hprof_basic_float = 6,
144 hprof_basic_double = 7,
145 hprof_basic_byte = 8,
146 hprof_basic_short = 9,
147 hprof_basic_int = 10,
148 hprof_basic_long = 11,
149 };
150
151 using HprofStringId = uint32_t;
152 using HprofClassObjectId = uint32_t;
153 using HprofClassSerialNumber = uint32_t;
154 using HprofStackTraceSerialNumber = uint32_t;
155 using HprofStackFrameId = uint32_t;
156 static constexpr HprofStackTraceSerialNumber kHprofNullStackTrace = 0;
157
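// Output writer hierarchy. EndianOutput itself only counts bytes and is used for the sizing pass;
// EndianOutputBuffered buffers one record in big-endian byte order and hands completed records to
// HandleFlush(); FileEndianOutput flushes them to a File, while VectorEndianOutput appends them to
// a byte vector for DDMS.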
158 class EndianOutput {
159 public:
  EndianOutput() : length_(0), sum_length_(0), max_length_(0), started_(false) {}
  virtual ~EndianOutput() {}
162
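  // Every top-level record is encoded as: U1 tag, U4 timestamp, U4 body length in bytes. The
  // length is written as a placeholder here and back-patched by EndRecord() once the body size
  // is known.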
  void StartNewRecord(uint8_t tag, uint32_t time) {
164 if (length_ > 0) {
165 EndRecord();
166 }
167 DCHECK_EQ(length_, 0U);
168 AddU1(tag);
169 AddU4(time);
170 AddU4(0xdeaddead); // Length, replaced on flush.
171 started_ = true;
172 }
173
  void EndRecord() {
175 // Replace length in header.
176 if (started_) {
177 UpdateU4(sizeof(uint8_t) + sizeof(uint32_t),
178 length_ - sizeof(uint8_t) - 2 * sizeof(uint32_t));
179 }
180
181 HandleEndRecord();
182
183 sum_length_ += length_;
184 max_length_ = std::max(max_length_, length_);
185 length_ = 0;
186 started_ = false;
187 }
188
  void AddU1(uint8_t value) {
190 AddU1List(&value, 1);
191 }
  void AddU2(uint16_t value) {
193 AddU2List(&value, 1);
194 }
  void AddU4(uint32_t value) {
196 AddU4List(&value, 1);
197 }
198
  void AddU8(uint64_t value) {
200 AddU8List(&value, 1);
201 }
202
  void AddObjectId(const mirror::Object* value) {
204 AddU4(PointerToLowMemUInt32(value));
205 }
206
  void AddStackTraceSerialNumber(HprofStackTraceSerialNumber value) {
208 AddU4(value);
209 }
210
211 // The ID for the synthetic object generated to account for class static overhead.
  void AddClassStaticsId(const mirror::Class* value) {
213 AddU4(1 | PointerToLowMemUInt32(value));
214 }
215
  void AddJniGlobalRefId(jobject value) {
217 AddU4(PointerToLowMemUInt32(value));
218 }
219
  void AddClassId(HprofClassObjectId value) {
221 AddU4(value);
222 }
223
  void AddStringId(HprofStringId value) {
225 AddU4(value);
226 }
227
  void AddU1List(const uint8_t* values, size_t count) {
229 HandleU1List(values, count);
230 length_ += count;
231 }
  void AddU2List(const uint16_t* values, size_t count) {
233 HandleU2List(values, count);
234 length_ += count * sizeof(uint16_t);
235 }
  void AddU4List(const uint32_t* values, size_t count) {
237 HandleU4List(values, count);
238 length_ += count * sizeof(uint32_t);
239 }
  virtual void UpdateU4(size_t offset, uint32_t new_value ATTRIBUTE_UNUSED) {
241 DCHECK_LE(offset, length_ - 4);
242 }
  void AddU8List(const uint64_t* values, size_t count) {
244 HandleU8List(values, count);
245 length_ += count * sizeof(uint64_t);
246 }
247
  void AddIdList(mirror::ObjectArray<mirror::Object>* values)
      REQUIRES_SHARED(Locks::mutator_lock_) {
250 const int32_t length = values->GetLength();
251 for (int32_t i = 0; i < length; ++i) {
252 AddObjectId(values->GetWithoutChecks(i).Ptr());
253 }
254 }
255
  void AddUtf8String(const char* str) {
257 // The terminating NUL character is NOT written.
258 AddU1List((const uint8_t*)str, strlen(str));
259 }
260
  size_t Length() const {
262 return length_;
263 }
264
  size_t SumLength() const {
266 return sum_length_;
267 }
268
  size_t MaxLength() const {
270 return max_length_;
271 }
272
273 protected:
  virtual void HandleU1List(const uint8_t* values ATTRIBUTE_UNUSED,
                            size_t count ATTRIBUTE_UNUSED) {
276 }
  virtual void HandleU1AsU2List(const uint8_t* values ATTRIBUTE_UNUSED,
                                size_t count ATTRIBUTE_UNUSED) {
279 }
  virtual void HandleU2List(const uint16_t* values ATTRIBUTE_UNUSED,
                            size_t count ATTRIBUTE_UNUSED) {
282 }
  virtual void HandleU4List(const uint32_t* values ATTRIBUTE_UNUSED,
                            size_t count ATTRIBUTE_UNUSED) {
285 }
  virtual void HandleU8List(const uint64_t* values ATTRIBUTE_UNUSED,
                            size_t count ATTRIBUTE_UNUSED) {
288 }
  virtual void HandleEndRecord() {
290 }
291
292 size_t length_; // Current record size.
293 size_t sum_length_; // Size of all data.
294 size_t max_length_; // Maximum seen length.
295 bool started_; // Was StartRecord called?
296 };
297
298 // This keeps things buffered until flushed.
299 class EndianOutputBuffered : public EndianOutput {
300 public:
  explicit EndianOutputBuffered(size_t reserve_size) {
302 buffer_.reserve(reserve_size);
303 }
  virtual ~EndianOutputBuffered() {}
305
  void UpdateU4(size_t offset, uint32_t new_value) override {
307 DCHECK_LE(offset, length_ - 4);
308 buffer_[offset + 0] = static_cast<uint8_t>((new_value >> 24) & 0xFF);
309 buffer_[offset + 1] = static_cast<uint8_t>((new_value >> 16) & 0xFF);
310 buffer_[offset + 2] = static_cast<uint8_t>((new_value >> 8) & 0xFF);
311 buffer_[offset + 3] = static_cast<uint8_t>((new_value >> 0) & 0xFF);
312 }
313
314 protected:
  void HandleU1List(const uint8_t* values, size_t count) override {
316 DCHECK_EQ(length_, buffer_.size());
317 buffer_.insert(buffer_.end(), values, values + count);
318 }
319
  void HandleU1AsU2List(const uint8_t* values, size_t count) override {
321 DCHECK_EQ(length_, buffer_.size());
    // 8-bit values are written in pairs to form 16-bit blocks, like a Java char; if the count is
    // odd, a zero byte is prepended as padding.
323 if (count & 1) {
324 buffer_.push_back(0);
325 }
326 for (size_t i = 0; i < count; ++i) {
327 uint8_t value = *values;
328 buffer_.push_back(value);
329 values++;
330 }
331 }
332
  void HandleU2List(const uint16_t* values, size_t count) override {
334 DCHECK_EQ(length_, buffer_.size());
335 for (size_t i = 0; i < count; ++i) {
336 uint16_t value = *values;
337 buffer_.push_back(static_cast<uint8_t>((value >> 8) & 0xFF));
338 buffer_.push_back(static_cast<uint8_t>((value >> 0) & 0xFF));
339 values++;
340 }
341 }
342
  void HandleU4List(const uint32_t* values, size_t count) override {
344 DCHECK_EQ(length_, buffer_.size());
345 for (size_t i = 0; i < count; ++i) {
346 uint32_t value = *values;
347 buffer_.push_back(static_cast<uint8_t>((value >> 24) & 0xFF));
348 buffer_.push_back(static_cast<uint8_t>((value >> 16) & 0xFF));
349 buffer_.push_back(static_cast<uint8_t>((value >> 8) & 0xFF));
350 buffer_.push_back(static_cast<uint8_t>((value >> 0) & 0xFF));
351 values++;
352 }
353 }
354
  void HandleU8List(const uint64_t* values, size_t count) override {
356 DCHECK_EQ(length_, buffer_.size());
357 for (size_t i = 0; i < count; ++i) {
358 uint64_t value = *values;
359 buffer_.push_back(static_cast<uint8_t>((value >> 56) & 0xFF));
360 buffer_.push_back(static_cast<uint8_t>((value >> 48) & 0xFF));
361 buffer_.push_back(static_cast<uint8_t>((value >> 40) & 0xFF));
362 buffer_.push_back(static_cast<uint8_t>((value >> 32) & 0xFF));
363 buffer_.push_back(static_cast<uint8_t>((value >> 24) & 0xFF));
364 buffer_.push_back(static_cast<uint8_t>((value >> 16) & 0xFF));
365 buffer_.push_back(static_cast<uint8_t>((value >> 8) & 0xFF));
366 buffer_.push_back(static_cast<uint8_t>((value >> 0) & 0xFF));
367 values++;
368 }
369 }
370
  void HandleEndRecord() override {
372 DCHECK_EQ(buffer_.size(), length_);
373 if (kIsDebugBuild && started_) {
374 uint32_t stored_length =
375 static_cast<uint32_t>(buffer_[5]) << 24 |
376 static_cast<uint32_t>(buffer_[6]) << 16 |
377 static_cast<uint32_t>(buffer_[7]) << 8 |
378 static_cast<uint32_t>(buffer_[8]);
379 DCHECK_EQ(stored_length, length_ - sizeof(uint8_t) - 2 * sizeof(uint32_t));
380 }
381 HandleFlush(buffer_.data(), length_);
382 buffer_.clear();
383 }
384
  virtual void HandleFlush(const uint8_t* buffer ATTRIBUTE_UNUSED, size_t length ATTRIBUTE_UNUSED) {
386 }
387
388 std::vector<uint8_t> buffer_;
389 };
390
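// Writes each completed record to the given File and remembers whether any write failed.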
391 class FileEndianOutput final : public EndianOutputBuffered {
392 public:
  FileEndianOutput(File* fp, size_t reserved_size)
      : EndianOutputBuffered(reserved_size), fp_(fp), errors_(false) {
395 DCHECK(fp != nullptr);
396 }
  ~FileEndianOutput() {
398 }
399
  bool Errors() {
401 return errors_;
402 }
403
404 protected:
  void HandleFlush(const uint8_t* buffer, size_t length) override {
406 if (!errors_) {
407 errors_ = !fp_->WriteFully(buffer, length);
408 }
409 }
410
411 private:
412 File* fp_;
413 bool errors_;
414 };
415
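// Accumulates the entire dump in a caller-provided byte vector so it can be sent to DDMS as a
// single chunk.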
class VectorEndianOutput final : public EndianOutputBuffered {
 public:
  VectorEndianOutput(std::vector<uint8_t>& data, size_t reserved_size)
      : EndianOutputBuffered(reserved_size), full_data_(data) {}
  ~VectorEndianOutput() {}
421
422 protected:
  void HandleFlush(const uint8_t* buf, size_t length) override {
424 size_t old_size = full_data_.size();
425 full_data_.resize(old_size + length);
426 memcpy(full_data_.data() + old_size, buf, length);
427 }
428
429 private:
430 std::vector<uint8_t>& full_data_;
431 };
432
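// Shorthand used by the dumping code below: "__ AddU4(x)" expands to "output_->AddU4(x)".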
433 #define __ output_->
434
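// Drives the dump: a counting-only first pass sizes the output, then a second pass writes the
// header (strings, classes, stack traces) followed by the heap dump records, either to a file or
// directly to DDMS.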
435 class Hprof : public SingleRootVisitor {
436 public:
  Hprof(const char* output_filename, int fd, bool direct_to_ddms)
      : filename_(output_filename),
        fd_(fd),
        direct_to_ddms_(direct_to_ddms) {
441 LOG(INFO) << "hprof: heap dump \"" << filename_ << "\" starting...";
442 }
443
  void Dump()
      REQUIRES(Locks::mutator_lock_)
      REQUIRES(!Locks::heap_bitmap_lock_, !Locks::alloc_tracker_lock_) {
447 {
448 MutexLock mu(Thread::Current(), *Locks::alloc_tracker_lock_);
449 if (Runtime::Current()->GetHeap()->IsAllocTrackingEnabled()) {
450 PopulateAllocationTrackingTraces();
451 }
452 }
453
454 // First pass to measure the size of the dump.
455 size_t overall_size;
456 size_t max_length;
457 {
458 EndianOutput count_output;
459 output_ = &count_output;
460 ProcessHeap(false);
461 overall_size = count_output.SumLength();
462 max_length = count_output.MaxLength();
463 output_ = nullptr;
464 }
465
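    // Second pass: write the actual data, either directly to DDMS or to a file.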
466 bool okay;
467 visited_objects_.clear();
468 if (direct_to_ddms_) {
469 if (kDirectStream) {
470 okay = DumpToDdmsDirect(overall_size, max_length, CHUNK_TYPE("HPDS"));
471 } else {
472 okay = DumpToDdmsBuffered(overall_size, max_length);
473 }
474 } else {
475 okay = DumpToFile(overall_size, max_length);
476 }
477
478 if (okay) {
479 const uint64_t duration = NanoTime() - start_ns_;
480 LOG(INFO) << "hprof: heap dump completed (" << PrettySize(RoundUp(overall_size, KB))
481 << ") in " << PrettyDuration(duration)
482 << " objects " << total_objects_
483 << " objects with stack traces " << total_objects_with_stack_trace_;
484 }
485 }
486
487 private:
488 void DumpHeapObject(mirror::Object* obj)
489 REQUIRES_SHARED(Locks::mutator_lock_);
490
491 void DumpHeapClass(mirror::Class* klass)
492 REQUIRES_SHARED(Locks::mutator_lock_);
493
494 void DumpHeapArray(mirror::Array* obj, mirror::Class* klass)
495 REQUIRES_SHARED(Locks::mutator_lock_);
496
497 void DumpFakeObjectArray(mirror::Object* obj, const std::set<mirror::Object*>& elements)
498 REQUIRES_SHARED(Locks::mutator_lock_);
499
500 void DumpHeapInstanceObject(mirror::Object* obj,
501 mirror::Class* klass,
502 const std::set<mirror::Object*>& fake_roots)
503 REQUIRES_SHARED(Locks::mutator_lock_);
504
505 bool AddRuntimeInternalObjectsField(mirror::Class* klass) REQUIRES_SHARED(Locks::mutator_lock_);
506
  void ProcessHeap(bool header_first)
      REQUIRES(Locks::mutator_lock_) {
509 // Reset current heap and object count.
510 current_heap_ = HPROF_HEAP_DEFAULT;
511 objects_in_segment_ = 0;
512
513 if (header_first) {
514 ProcessHeader(true);
515 ProcessBody();
516 } else {
517 ProcessBody();
518 ProcessHeader(false);
519 }
520 }
521
  void ProcessBody() REQUIRES(Locks::mutator_lock_) {
523 Runtime* const runtime = Runtime::Current();
524 // Walk the roots and the heap.
525 output_->StartNewRecord(HPROF_TAG_HEAP_DUMP_SEGMENT, kHprofTime);
526
527 simple_roots_.clear();
528 runtime->VisitRoots(this);
529 runtime->VisitImageRoots(this);
530 auto dump_object = [this](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
531 DCHECK(obj != nullptr);
532 DumpHeapObject(obj);
533 };
534 runtime->GetHeap()->VisitObjectsPaused(dump_object);
535 output_->StartNewRecord(HPROF_TAG_HEAP_DUMP_END, kHprofTime);
536 output_->EndRecord();
537 }
538
  void ProcessHeader(bool string_first) REQUIRES(Locks::mutator_lock_) {
540 // Write the header.
541 WriteFixedHeader();
542 // Write the string and class tables, and any stack traces, to the header.
543 // (jhat requires that these appear before any of the data in the body that refers to them.)
544 // jhat also requires the string table appear before class table and stack traces.
545 // However, WriteStackTraces() can modify the string table, so it's necessary to call
546 // WriteStringTable() last in the first pass, to compute the correct length of the output.
547 if (string_first) {
548 WriteStringTable();
549 }
550 WriteClassTable();
551 WriteStackTraces();
552 if (!string_first) {
553 WriteStringTable();
554 }
555 output_->EndRecord();
556 }
557
  void WriteClassTable() REQUIRES_SHARED(Locks::mutator_lock_) {
559 for (const auto& p : classes_) {
560 mirror::Class* c = p.first;
561 HprofClassSerialNumber sn = p.second;
562 CHECK(c != nullptr);
563 output_->StartNewRecord(HPROF_TAG_LOAD_CLASS, kHprofTime);
564 // LOAD CLASS format:
565 // U4: class serial number (always > 0)
566 // ID: class object ID. We use the address of the class object structure as its ID.
567 // U4: stack trace serial number
568 // ID: class name string ID
569 __ AddU4(sn);
570 __ AddObjectId(c);
571 __ AddStackTraceSerialNumber(LookupStackTraceSerialNumber(c));
572 __ AddStringId(LookupClassNameId(c));
573 }
574 }
575
  void WriteStringTable() {
577 for (const auto& p : strings_) {
578 const std::string& string = p.first;
579 const HprofStringId id = p.second;
580
581 output_->StartNewRecord(HPROF_TAG_STRING, kHprofTime);
582
583 // STRING format:
584 // ID: ID for this string
585 // U1*: UTF8 characters for string (NOT null terminated)
586 // (the record format encodes the length)
587 __ AddU4(id);
588 __ AddUtf8String(string.c_str());
589 }
590 }
591
  void StartNewHeapDumpSegment() {
593 // This flushes the old segment and starts a new one.
594 output_->StartNewRecord(HPROF_TAG_HEAP_DUMP_SEGMENT, kHprofTime);
595 objects_in_segment_ = 0;
596 // Starting a new HEAP_DUMP resets the heap to default.
597 current_heap_ = HPROF_HEAP_DEFAULT;
598 }
599
  void CheckHeapSegmentConstraints() {
601 if (objects_in_segment_ >= kMaxObjectsPerSegment || output_->Length() >= kMaxBytesPerSegment) {
602 StartNewHeapDumpSegment();
603 }
604 }
605
606 void VisitRoot(mirror::Object* obj, const RootInfo& root_info)
607 override REQUIRES_SHARED(Locks::mutator_lock_);
608 void MarkRootObject(const mirror::Object* obj, jobject jni_obj, HprofHeapTag heap_tag,
609 uint32_t thread_serial);
610
  HprofClassObjectId LookupClassId(mirror::Class* c) REQUIRES_SHARED(Locks::mutator_lock_) {
612 if (c != nullptr) {
613 auto it = classes_.find(c);
614 if (it == classes_.end()) {
615 // first time to see this class
616 HprofClassSerialNumber sn = next_class_serial_number_++;
617 classes_.Put(c, sn);
618 // Make sure that we've assigned a string ID for this class' name
619 LookupClassNameId(c);
620 }
621 }
622 return PointerToLowMemUInt32(c);
623 }
624
  HprofStackTraceSerialNumber LookupStackTraceSerialNumber(const mirror::Object* obj)
      REQUIRES_SHARED(Locks::mutator_lock_) {
627 auto r = allocation_records_.find(obj);
628 if (r == allocation_records_.end()) {
629 return kHprofNullStackTrace;
630 } else {
631 const gc::AllocRecordStackTrace* trace = r->second;
632 auto result = traces_.find(trace);
633 CHECK(result != traces_.end());
634 return result->second;
635 }
636 }
637
  HprofStringId LookupStringId(mirror::String* string) REQUIRES_SHARED(Locks::mutator_lock_) {
639 return LookupStringId(string->ToModifiedUtf8());
640 }
641
  HprofStringId LookupStringId(const char* string) {
643 return LookupStringId(std::string(string));
644 }
645
  HprofStringId LookupStringId(const std::string& string) {
647 auto it = strings_.find(string);
648 if (it != strings_.end()) {
649 return it->second;
650 }
651 HprofStringId id = next_string_id_++;
652 strings_.Put(string, id);
653 return id;
654 }
655
  HprofStringId LookupClassNameId(mirror::Class* c) REQUIRES_SHARED(Locks::mutator_lock_) {
657 return LookupStringId(c->PrettyDescriptor());
658 }
659
  void WriteFixedHeader() {
661 // Write the file header.
662 // U1: NUL-terminated magic string.
663 const char magic[] = "JAVA PROFILE 1.0.3";
664 __ AddU1List(reinterpret_cast<const uint8_t*>(magic), sizeof(magic));
665
666 // U4: size of identifiers. We're using addresses as IDs and our heap references are stored
667 // as uint32_t.
668 // Note of warning: hprof-conv hard-codes the size of identifiers to 4.
669 static_assert(sizeof(mirror::HeapReference<mirror::Object>) == sizeof(uint32_t),
670 "Unexpected HeapReference size");
671 __ AddU4(sizeof(uint32_t));
672
673 // The current time, in milliseconds since 0:00 GMT, 1/1/70.
674 timeval now;
675 const uint64_t nowMs = (gettimeofday(&now, nullptr) < 0) ? 0 :
676 (uint64_t)now.tv_sec * 1000 + now.tv_usec / 1000;
677 // TODO: It seems it would be correct to use U8.
678 // U4: high word of the 64-bit time.
679 __ AddU4(static_cast<uint32_t>(nowMs >> 32));
680 // U4: low word of the 64-bit time.
681 __ AddU4(static_cast<uint32_t>(nowMs & 0xFFFFFFFF));
682 }
683
  void WriteStackTraces() REQUIRES_SHARED(Locks::mutator_lock_) {
685 // Write a dummy stack trace record so the analysis tools don't freak out.
686 output_->StartNewRecord(HPROF_TAG_STACK_TRACE, kHprofTime);
687 __ AddStackTraceSerialNumber(kHprofNullStackTrace);
688 __ AddU4(kHprofNullThread);
689 __ AddU4(0); // no frames
690
691 // TODO: jhat complains "WARNING: Stack trace not found for serial # -1", but no trace should
692 // have -1 as its serial number (as long as HprofStackTraceSerialNumber doesn't overflow).
693 for (const auto& it : traces_) {
694 const gc::AllocRecordStackTrace* trace = it.first;
695 HprofStackTraceSerialNumber trace_sn = it.second;
696 size_t depth = trace->GetDepth();
697
698 // First write stack frames of the trace
699 for (size_t i = 0; i < depth; ++i) {
700 const gc::AllocRecordStackTraceElement* frame = &trace->GetStackElement(i);
701 ArtMethod* method = frame->GetMethod();
702 CHECK(method != nullptr);
703 output_->StartNewRecord(HPROF_TAG_STACK_FRAME, kHprofTime);
704 // STACK FRAME format:
705 // ID: stack frame ID. We use the address of the AllocRecordStackTraceElement object as its ID.
706 // ID: method name string ID
707 // ID: method signature string ID
708 // ID: source file name string ID
709 // U4: class serial number
710 // U4: >0, line number; 0, no line information available; -1, unknown location
711 auto frame_result = frames_.find(frame);
712 CHECK(frame_result != frames_.end());
713 __ AddU4(frame_result->second);
714 __ AddStringId(LookupStringId(method->GetName()));
715 __ AddStringId(LookupStringId(method->GetSignature().ToString()));
716 const char* source_file = method->GetDeclaringClassSourceFile();
717 if (source_file == nullptr) {
718 source_file = "";
719 }
720 __ AddStringId(LookupStringId(source_file));
721 auto class_result = classes_.find(method->GetDeclaringClass().Ptr());
722 CHECK(class_result != classes_.end());
723 __ AddU4(class_result->second);
724 __ AddU4(frame->ComputeLineNumber());
725 }
726
727 // Then write the trace itself
728 output_->StartNewRecord(HPROF_TAG_STACK_TRACE, kHprofTime);
729 // STACK TRACE format:
730 // U4: stack trace serial number. We use the address of the AllocRecordStackTrace object as its serial number.
731 // U4: thread serial number. We use Thread::GetTid().
732 // U4: number of frames
733 // [ID]*: series of stack frame ID's
734 __ AddStackTraceSerialNumber(trace_sn);
735 __ AddU4(trace->GetTid());
736 __ AddU4(depth);
737 for (size_t i = 0; i < depth; ++i) {
738 const gc::AllocRecordStackTraceElement* frame = &trace->GetStackElement(i);
739 auto frame_result = frames_.find(frame);
740 CHECK(frame_result != frames_.end());
741 __ AddU4(frame_result->second);
742 }
743 }
744 }
745
  bool DumpToDdmsBuffered(size_t overall_size ATTRIBUTE_UNUSED, size_t max_length ATTRIBUTE_UNUSED)
      REQUIRES(Locks::mutator_lock_) {
748 LOG(FATAL) << "Unimplemented";
749 UNREACHABLE();
750 // // Send the data off to DDMS.
751 // iovec iov[2];
752 // iov[0].iov_base = header_data_ptr_;
753 // iov[0].iov_len = header_data_size_;
754 // iov[1].iov_base = body_data_ptr_;
755 // iov[1].iov_len = body_data_size_;
756 // Dbg::DdmSendChunkV(CHUNK_TYPE("HPDS"), iov, 2);
757 }
758
  bool DumpToFile(size_t overall_size, size_t max_length)
      REQUIRES(Locks::mutator_lock_) {
761 // Where exactly are we writing to?
762 int out_fd;
763 if (fd_ >= 0) {
764 out_fd = DupCloexec(fd_);
765 if (out_fd < 0) {
766 ThrowRuntimeException("Couldn't dump heap; dup(%d) failed: %s", fd_, strerror(errno));
767 return false;
768 }
769 } else {
770 out_fd = open(filename_.c_str(), O_WRONLY | O_CREAT | O_TRUNC | O_CLOEXEC, 0644);
771 if (out_fd < 0) {
772 ThrowRuntimeException("Couldn't dump heap; open(\"%s\") failed: %s", filename_.c_str(),
773 strerror(errno));
774 return false;
775 }
776 }
777
778 std::unique_ptr<File> file(new File(out_fd, filename_, true));
779 bool okay;
780 {
781 FileEndianOutput file_output(file.get(), max_length);
782 output_ = &file_output;
783 ProcessHeap(true);
784 okay = !file_output.Errors();
785
786 if (okay) {
787 // Check for expected size. Output is expected to be less-or-equal than first phase, see
788 // b/23521263.
789 DCHECK_LE(file_output.SumLength(), overall_size);
790 }
791 output_ = nullptr;
792 }
793
794 if (okay) {
795 okay = file->FlushCloseOrErase() == 0;
796 } else {
797 file->Erase();
798 }
799 if (!okay) {
800 std::string msg(android::base::StringPrintf("Couldn't dump heap; writing \"%s\" failed: %s",
801 filename_.c_str(),
802 strerror(errno)));
803 ThrowRuntimeException("%s", msg.c_str());
804 LOG(ERROR) << msg;
805 }
806
807 return okay;
808 }
809
  bool DumpToDdmsDirect(size_t overall_size, size_t max_length, uint32_t chunk_type)
      REQUIRES(Locks::mutator_lock_) {
812 CHECK(direct_to_ddms_);
813
814 std::vector<uint8_t> out_data;
815
816 // TODO It would be really good to have some streaming thing again. b/73084059
    VectorEndianOutput output(out_data, max_length);
818 output_ = &output;
819
820 // Write the dump.
821 ProcessHeap(true);
822
823 Runtime::Current()->GetRuntimeCallbacks()->DdmPublishChunk(
824 chunk_type, ArrayRef<const uint8_t>(out_data.data(), out_data.size()));
825
826 // Check for expected size. See DumpToFile for comment.
827 DCHECK_LE(output.SumLength(), overall_size);
828 output_ = nullptr;
829
830 return true;
831 }
832
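  // Walks the runtime's allocation records and builds the object->trace, trace->serial-number and
  // frame->id maps used when emitting STACK FRAME / STACK TRACE records and per-object stack
  // trace serial numbers.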
  void PopulateAllocationTrackingTraces()
      REQUIRES(Locks::mutator_lock_, Locks::alloc_tracker_lock_) {
835 gc::AllocRecordObjectMap* records = Runtime::Current()->GetHeap()->GetAllocationRecords();
836 CHECK(records != nullptr);
837 HprofStackTraceSerialNumber next_trace_sn = kHprofNullStackTrace + 1;
838 HprofStackFrameId next_frame_id = 0;
839 size_t count = 0;
840
841 for (auto it = records->Begin(), end = records->End(); it != end; ++it) {
842 const mirror::Object* obj = it->first.Read();
843 if (obj == nullptr) {
844 continue;
845 }
846 ++count;
847 const gc::AllocRecordStackTrace* trace = it->second.GetStackTrace();
848
849 // Copy the pair into a real hash map to speed up look up.
850 auto records_result = allocation_records_.emplace(obj, trace);
851 // The insertion should always succeed, i.e. no duplicate object pointers in "records"
852 CHECK(records_result.second);
853
854 // Generate serial numbers for traces, and IDs for frames.
855 auto traces_result = traces_.find(trace);
856 if (traces_result == traces_.end()) {
857 traces_.emplace(trace, next_trace_sn++);
858 // only check frames if the trace is newly discovered
859 for (size_t i = 0, depth = trace->GetDepth(); i < depth; ++i) {
860 const gc::AllocRecordStackTraceElement* frame = &trace->GetStackElement(i);
861 auto frames_result = frames_.find(frame);
862 if (frames_result == frames_.end()) {
863 frames_.emplace(frame, next_frame_id++);
864 }
865 }
866 }
867 }
868 CHECK_EQ(traces_.size(), next_trace_sn - kHprofNullStackTrace - 1);
869 CHECK_EQ(frames_.size(), next_frame_id);
870 total_objects_with_stack_trace_ = count;
871 }
872
873 // If direct_to_ddms_ is set, "filename_" and "fd" will be ignored.
874 // Otherwise, "filename_" must be valid, though if "fd" >= 0 it will
875 // only be used for debug messages.
876 std::string filename_;
877 int fd_;
878 bool direct_to_ddms_;
879
880 uint64_t start_ns_ = NanoTime();
881
882 EndianOutput* output_ = nullptr;
883
884 HprofHeapId current_heap_ = HPROF_HEAP_DEFAULT; // Which heap we're currently dumping.
885 size_t objects_in_segment_ = 0;
886
887 size_t total_objects_ = 0u;
888 size_t total_objects_with_stack_trace_ = 0u;
889
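  // Strings and classes seen so far. They are emitted as STRING and LOAD_CLASS records in the
  // header; IDs and serial numbers are handed out sequentially as new entries are added.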
890 HprofStringId next_string_id_ = 0x400000;
891 SafeMap<std::string, HprofStringId> strings_;
892 HprofClassSerialNumber next_class_serial_number_ = 1;
893 SafeMap<mirror::Class*, HprofClassSerialNumber> classes_;
894
895 std::unordered_map<const gc::AllocRecordStackTrace*, HprofStackTraceSerialNumber,
896 gc::HashAllocRecordTypesPtr<gc::AllocRecordStackTrace>,
897 gc::EqAllocRecordTypesPtr<gc::AllocRecordStackTrace>> traces_;
898 std::unordered_map<const gc::AllocRecordStackTraceElement*, HprofStackFrameId,
899 gc::HashAllocRecordTypesPtr<gc::AllocRecordStackTraceElement>,
900 gc::EqAllocRecordTypesPtr<gc::AllocRecordStackTraceElement>> frames_;
901 std::unordered_map<const mirror::Object*, const gc::AllocRecordStackTrace*> allocation_records_;
902
903 // Set used to keep track of what simple root records we have already
904 // emitted, to avoid emitting duplicate entries. The simple root records are
905 // those that contain no other information than the root type and the object
906 // id. A pair of root type and object id is packed into a uint64_t, with
907 // the root type in the upper 32 bits and the object id in the lower 32
908 // bits.
909 std::unordered_set<uint64_t> simple_roots_;
910
911 // To make sure we don't dump the same object multiple times. b/34967844
912 std::unordered_set<mirror::Object*> visited_objects_;
913
914 friend class GcRootVisitor;
915 DISALLOW_COPY_AND_ASSIGN(Hprof);
916 };
917
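// Maps the first character of a field or array type descriptor (e.g. "I", "Ljava/lang/Object;",
// "[B") to the corresponding hprof basic type and, optionally, its size in bytes.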
static HprofBasicType SignatureToBasicTypeAndSize(const char* sig, size_t* size_out) {
919 char c = sig[0];
920 HprofBasicType ret;
921 size_t size;
922
923 switch (c) {
924 case '[':
925 case 'L':
926 ret = hprof_basic_object;
927 size = 4;
928 break;
929 case 'Z':
930 ret = hprof_basic_boolean;
931 size = 1;
932 break;
933 case 'C':
934 ret = hprof_basic_char;
935 size = 2;
936 break;
937 case 'F':
938 ret = hprof_basic_float;
939 size = 4;
940 break;
941 case 'D':
942 ret = hprof_basic_double;
943 size = 8;
944 break;
945 case 'B':
946 ret = hprof_basic_byte;
947 size = 1;
948 break;
949 case 'S':
950 ret = hprof_basic_short;
951 size = 2;
952 break;
953 case 'I':
954 ret = hprof_basic_int;
955 size = 4;
956 break;
957 case 'J':
958 ret = hprof_basic_long;
959 size = 8;
960 break;
961 default:
962 LOG(FATAL) << "UNREACHABLE";
963 UNREACHABLE();
964 }
965
966 if (size_out != nullptr) {
967 *size_out = size;
968 }
969
970 return ret;
971 }
972
// Called for every visited root, but only emits a record when heap_tag is non-zero, which is
// usually only true when marking the root set or unreachable objects. Used to add root-set
// references to obj.
void Hprof::MarkRootObject(const mirror::Object* obj, jobject jni_obj, HprofHeapTag heap_tag,
                           uint32_t thread_serial) {
979 if (heap_tag == 0) {
980 return;
981 }
982
983 CheckHeapSegmentConstraints();
984
985 switch (heap_tag) {
986 // ID: object ID
987 case HPROF_ROOT_UNKNOWN:
988 case HPROF_ROOT_STICKY_CLASS:
989 case HPROF_ROOT_MONITOR_USED:
990 case HPROF_ROOT_INTERNED_STRING:
991 case HPROF_ROOT_DEBUGGER:
992 case HPROF_ROOT_VM_INTERNAL: {
993 uint64_t key = (static_cast<uint64_t>(heap_tag) << 32) | PointerToLowMemUInt32(obj);
994 if (simple_roots_.insert(key).second) {
995 __ AddU1(heap_tag);
996 __ AddObjectId(obj);
997 }
998 break;
999 }
1000
1001 // ID: object ID
1002 // ID: JNI global ref ID
1003 case HPROF_ROOT_JNI_GLOBAL:
1004 __ AddU1(heap_tag);
1005 __ AddObjectId(obj);
1006 __ AddJniGlobalRefId(jni_obj);
1007 break;
1008
1009 // ID: object ID
1010 // U4: thread serial number
1011 // U4: frame number in stack trace (-1 for empty)
1012 case HPROF_ROOT_JNI_LOCAL:
1013 case HPROF_ROOT_JNI_MONITOR:
1014 case HPROF_ROOT_JAVA_FRAME:
1015 __ AddU1(heap_tag);
1016 __ AddObjectId(obj);
1017 __ AddU4(thread_serial);
1018 __ AddU4((uint32_t)-1);
1019 break;
1020
1021 // ID: object ID
1022 // U4: thread serial number
1023 case HPROF_ROOT_NATIVE_STACK:
1024 case HPROF_ROOT_THREAD_BLOCK:
1025 __ AddU1(heap_tag);
1026 __ AddObjectId(obj);
1027 __ AddU4(thread_serial);
1028 break;
1029
1030 // ID: thread object ID
1031 // U4: thread serial number
1032 // U4: stack trace serial number
1033 case HPROF_ROOT_THREAD_OBJECT:
1034 __ AddU1(heap_tag);
1035 __ AddObjectId(obj);
1036 __ AddU4(thread_serial);
      __ AddU4((uint32_t)-1);  // Stack trace serial number (not available here).
1038 break;
1039
1040 case HPROF_CLASS_DUMP:
1041 case HPROF_INSTANCE_DUMP:
1042 case HPROF_OBJECT_ARRAY_DUMP:
1043 case HPROF_PRIMITIVE_ARRAY_DUMP:
1044 case HPROF_HEAP_DUMP_INFO:
1045 case HPROF_PRIMITIVE_ARRAY_NODATA_DUMP:
1046 // Ignored.
1047 break;
1048
1049 case HPROF_ROOT_FINALIZING:
1050 case HPROF_ROOT_REFERENCE_CLEANUP:
1051 case HPROF_UNREACHABLE:
1052 LOG(FATAL) << "obsolete tag " << static_cast<int>(heap_tag);
1053 UNREACHABLE();
1054 }
1055
1056 ++objects_in_segment_;
1057 }
1058
bool Hprof::AddRuntimeInternalObjectsField(mirror::Class* klass) {
1060 if (klass->IsDexCacheClass()) {
1061 return true;
1062 }
1063 // IsClassLoaderClass is true for subclasses of classloader but we only want to add the fake
1064 // field to the java.lang.ClassLoader class.
1065 if (klass->IsClassLoaderClass() && klass->GetSuperClass()->IsObjectClass()) {
1066 return true;
1067 }
1068 return false;
1069 }
1070
void Hprof::DumpHeapObject(mirror::Object* obj) {
1072 // Ignore classes that are retired.
1073 if (obj->IsClass() && obj->AsClass()->IsRetired()) {
1074 return;
1075 }
1076 DCHECK(visited_objects_.insert(obj).second)
1077 << "Already visited " << obj << "(" << obj->PrettyTypeOf() << ")";
1078
1079 ++total_objects_;
1080
1081 class RootCollector {
1082 public:
1083 RootCollector() {}
1084
1085 void operator()(mirror::Object*, MemberOffset, bool) const {}
1086
    // Note that these don't have read barriers. That is OK, however, since the GC is guaranteed
    // not to be running during the hprof dumping process.
1089 void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
1090 REQUIRES_SHARED(Locks::mutator_lock_) {
1091 if (!root->IsNull()) {
1092 VisitRoot(root);
1093 }
1094 }
1095
1096 void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
1097 REQUIRES_SHARED(Locks::mutator_lock_) {
1098 roots_.insert(root->AsMirrorPtr());
1099 }
1100
1101 const std::set<mirror::Object*>& GetRoots() const {
1102 return roots_;
1103 }
1104
1105 private:
1106 // These roots are actually live from the object. Avoid marking them as roots in hprof to make
1107 // it easier to debug class unloading.
1108 mutable std::set<mirror::Object*> roots_;
1109 };
1110
1111 RootCollector visitor;
1112 // Collect all native roots.
1113 if (!obj->IsClass()) {
1114 obj->VisitReferences(visitor, VoidFunctor());
1115 }
1116
1117 gc::Heap* const heap = Runtime::Current()->GetHeap();
1118 const gc::space::ContinuousSpace* const space = heap->FindContinuousSpaceFromObject(obj, true);
1119 HprofHeapId heap_type = HPROF_HEAP_APP;
1120 if (space != nullptr) {
1121 if (space->IsZygoteSpace()) {
1122 heap_type = HPROF_HEAP_ZYGOTE;
1123 VisitRoot(obj, RootInfo(kRootVMInternal));
1124 } else if (space->IsImageSpace() && heap->ObjectIsInBootImageSpace(obj)) {
1125 // Only count objects in the boot image as HPROF_HEAP_IMAGE, this leaves app image objects as
1126 // HPROF_HEAP_APP. b/35762934
1127 heap_type = HPROF_HEAP_IMAGE;
1128 VisitRoot(obj, RootInfo(kRootVMInternal));
1129 }
1130 } else {
1131 const auto* los = heap->GetLargeObjectsSpace();
1132 if (los->Contains(obj) && los->IsZygoteLargeObject(Thread::Current(), obj)) {
1133 heap_type = HPROF_HEAP_ZYGOTE;
1134 VisitRoot(obj, RootInfo(kRootVMInternal));
1135 }
1136 }
1137 CheckHeapSegmentConstraints();
1138
1139 if (heap_type != current_heap_) {
1140 HprofStringId nameId;
1141
1142 // This object is in a different heap than the current one.
1143 // Emit a HEAP_DUMP_INFO tag to change heaps.
1144 __ AddU1(HPROF_HEAP_DUMP_INFO);
1145 __ AddU4(static_cast<uint32_t>(heap_type)); // uint32_t: heap type
1146 switch (heap_type) {
1147 case HPROF_HEAP_APP:
1148 nameId = LookupStringId("app");
1149 break;
1150 case HPROF_HEAP_ZYGOTE:
1151 nameId = LookupStringId("zygote");
1152 break;
1153 case HPROF_HEAP_IMAGE:
1154 nameId = LookupStringId("image");
1155 break;
1156 default:
1157 // Internal error
1158 LOG(ERROR) << "Unexpected desiredHeap";
1159 nameId = LookupStringId("<ILLEGAL>");
1160 break;
1161 }
1162 __ AddStringId(nameId);
1163 current_heap_ = heap_type;
1164 }
1165
1166 mirror::Class* c = obj->GetClass();
1167 if (c == nullptr) {
1168 // This object will bother HprofReader, because it has a null
1169 // class, so just don't dump it. It could be
1170 // gDvm.unlinkedJavaLangClass or it could be an object just
1171 // allocated which hasn't been initialized yet.
1172 } else {
1173 if (obj->IsClass()) {
1174 DumpHeapClass(obj->AsClass().Ptr());
1175 } else if (c->IsArrayClass()) {
1176 DumpHeapArray(obj->AsArray().Ptr(), c);
1177 } else {
1178 DumpHeapInstanceObject(obj, c, visitor.GetRoots());
1179 }
1180 }
1181
1182 ++objects_in_segment_;
1183 }
1184
void Hprof::DumpHeapClass(mirror::Class* klass) {
1186 if (!klass->IsResolved()) {
1187 // Class is allocated but not yet resolved: we cannot access its fields or super class.
1188 return;
1189 }
1190
1191 // Note: We will emit instance fields of Class as synthetic static fields with a prefix of
1192 // "$class$" so the class fields are visible in hprof dumps. For tools to account for that
1193 // correctly, we'll emit an instance size of zero for java.lang.Class, and also emit the
1194 // instance fields of java.lang.Object.
1195 //
1196 // For other overhead (currently only the embedded vtable), we will generate a synthetic
1197 // byte array (or field[s] in case the overhead size is of reference size or less).
1198
1199 const size_t num_static_fields = klass->NumStaticFields();
1200
1201 // Total class size:
1202 // * class instance fields (including Object instance fields)
1203 // * vtable
1204 // * class static fields
1205 const size_t total_class_size = klass->GetClassSize();
1206
1207 // Base class size (common parts of all Class instances):
1208 // * class instance fields (including Object instance fields)
1209 constexpr size_t base_class_size = sizeof(mirror::Class);
1210 CHECK_LE(base_class_size, total_class_size);
1211
1212 // Difference of Total and Base:
1213 // * vtable
1214 // * class static fields
1215 const size_t base_overhead_size = total_class_size - base_class_size;
1216
1217 // Tools (ahat/Studio) will count the static fields and account for them in the class size. We
1218 // must thus subtract them from base_overhead_size or they will be double-counted.
1219 size_t class_static_fields_size = 0;
1220 for (ArtField& class_static_field : klass->GetSFields()) {
1221 size_t size = 0;
1222 SignatureToBasicTypeAndSize(class_static_field.GetTypeDescriptor(), &size);
1223 class_static_fields_size += size;
1224 }
1225
1226 CHECK_GE(base_overhead_size, class_static_fields_size);
1227 // Now we have:
1228 // * vtable
1229 const size_t base_no_statics_overhead_size = base_overhead_size - class_static_fields_size;
1230
1231 // We may decide to display native overhead (the actual IMT, ArtFields and ArtMethods) in the
1232 // future.
1233 const size_t java_heap_overhead_size = base_no_statics_overhead_size;
1234
  // For overhead greater than 4 bytes, we'll allocate a synthetic byte array.
1236 if (java_heap_overhead_size > 4) {
1237 // Create a byte array to reflect the allocation of the
1238 // StaticField array at the end of this class.
1239 __ AddU1(HPROF_PRIMITIVE_ARRAY_DUMP);
1240 __ AddClassStaticsId(klass);
1241 __ AddStackTraceSerialNumber(LookupStackTraceSerialNumber(klass));
1242 __ AddU4(java_heap_overhead_size - 4);
1243 __ AddU1(hprof_basic_byte);
1244 for (size_t i = 0; i < java_heap_overhead_size - 4; ++i) {
1245 __ AddU1(0);
1246 }
1247 }
1248 const size_t java_heap_overhead_field_count = java_heap_overhead_size > 0
1249 ? (java_heap_overhead_size == 3 ? 2u : 1u)
1250 : 0;
1251
1252 __ AddU1(HPROF_CLASS_DUMP);
1253 __ AddClassId(LookupClassId(klass));
1254 __ AddStackTraceSerialNumber(LookupStackTraceSerialNumber(klass));
1255 __ AddClassId(LookupClassId(klass->GetSuperClass().Ptr()));
1256 __ AddObjectId(klass->GetClassLoader().Ptr());
1257 __ AddObjectId(nullptr); // no signer
1258 __ AddObjectId(nullptr); // no prot domain
1259 __ AddObjectId(nullptr); // reserved
1260 __ AddObjectId(nullptr); // reserved
1261 // Instance size.
1262 if (klass->IsClassClass()) {
1263 // As mentioned above, we will emit instance fields as synthetic static fields. So the
1264 // base object is "empty."
1265 __ AddU4(0);
1266 } else if (klass->IsStringClass()) {
1267 // Strings are variable length with character data at the end like arrays.
1268 // This outputs the size of an empty string.
1269 __ AddU4(sizeof(mirror::String));
1270 } else if (klass->IsArrayClass() || klass->IsPrimitive()) {
1271 __ AddU4(0);
1272 } else {
1273 __ AddU4(klass->GetObjectSize()); // instance size
1274 }
1275
1276 __ AddU2(0); // empty const pool
1277
1278 // Static fields
1279 //
1280 // Note: we report Class' and Object's instance fields here, too. This is for visibility reasons.
1281 // (b/38167721)
1282 mirror::Class* class_class = klass->GetClass();
1283
1284 DCHECK(class_class->GetSuperClass()->IsObjectClass());
1285 const size_t static_fields_reported = class_class->NumInstanceFields()
1286 + class_class->GetSuperClass()->NumInstanceFields()
1287 + java_heap_overhead_field_count
1288 + num_static_fields;
1289 __ AddU2(dchecked_integral_cast<uint16_t>(static_fields_reported));
1290
1291 if (java_heap_overhead_size != 0) {
1292 __ AddStringId(LookupStringId(kClassOverheadName));
1293 size_t overhead_fields = 0;
1294 if (java_heap_overhead_size > 4) {
1295 __ AddU1(hprof_basic_object);
1296 __ AddClassStaticsId(klass);
1297 ++overhead_fields;
1298 } else {
1299 switch (java_heap_overhead_size) {
1300 case 4: {
1301 __ AddU1(hprof_basic_int);
1302 __ AddU4(0);
1303 ++overhead_fields;
1304 break;
1305 }
1306
1307 case 2: {
1308 __ AddU1(hprof_basic_short);
1309 __ AddU2(0);
1310 ++overhead_fields;
1311 break;
1312 }
1313
1314 case 3: {
1315 __ AddU1(hprof_basic_short);
1316 __ AddU2(0);
1317 __ AddStringId(LookupStringId(std::string(kClassOverheadName) + "2"));
1318 ++overhead_fields;
1319 }
1320 FALLTHROUGH_INTENDED;
1321
1322 case 1: {
1323 __ AddU1(hprof_basic_byte);
1324 __ AddU1(0);
1325 ++overhead_fields;
1326 break;
1327 }
1328 }
1329 }
1330 DCHECK_EQ(java_heap_overhead_field_count, overhead_fields);
1331 }
1332
1333 // Helper lambda to emit the given static field. The second argument name_fn will be called to
1334 // generate the name to emit. This can be used to emit something else than the field's actual
1335 // name.
1336 auto static_field_writer = [&](ArtField& field, auto name_fn)
1337 REQUIRES_SHARED(Locks::mutator_lock_) {
1338 __ AddStringId(LookupStringId(name_fn(field)));
1339
1340 size_t size;
1341 HprofBasicType t = SignatureToBasicTypeAndSize(field.GetTypeDescriptor(), &size);
1342 __ AddU1(t);
1343 switch (t) {
1344 case hprof_basic_byte:
1345 __ AddU1(field.GetByte(klass));
1346 return;
1347 case hprof_basic_boolean:
1348 __ AddU1(field.GetBoolean(klass));
1349 return;
1350 case hprof_basic_char:
1351 __ AddU2(field.GetChar(klass));
1352 return;
1353 case hprof_basic_short:
1354 __ AddU2(field.GetShort(klass));
1355 return;
1356 case hprof_basic_float:
1357 case hprof_basic_int:
1358 case hprof_basic_object:
1359 __ AddU4(field.Get32(klass));
1360 return;
1361 case hprof_basic_double:
1362 case hprof_basic_long:
1363 __ AddU8(field.Get64(klass));
1364 return;
1365 }
1366 LOG(FATAL) << "Unexpected size " << size;
1367 UNREACHABLE();
1368 };
1369
1370 {
1371 auto class_instance_field_name_fn = [](ArtField& field) REQUIRES_SHARED(Locks::mutator_lock_) {
1372 return std::string("$class$") + field.GetName();
1373 };
1374 for (ArtField& class_instance_field : class_class->GetIFields()) {
1375 static_field_writer(class_instance_field, class_instance_field_name_fn);
1376 }
1377 for (ArtField& object_instance_field : class_class->GetSuperClass()->GetIFields()) {
1378 static_field_writer(object_instance_field, class_instance_field_name_fn);
1379 }
1380 }
1381
1382 {
1383 auto class_static_field_name_fn = [](ArtField& field) REQUIRES_SHARED(Locks::mutator_lock_) {
1384 return field.GetName();
1385 };
1386 for (ArtField& class_static_field : klass->GetSFields()) {
1387 static_field_writer(class_static_field, class_static_field_name_fn);
1388 }
1389 }
1390
1391 // Instance fields for this class (no superclass fields)
1392 int iFieldCount = klass->NumInstanceFields();
1393 // add_internal_runtime_objects is only for classes that may retain objects live through means
1394 // other than fields. It is never the case for strings.
1395 const bool add_internal_runtime_objects = AddRuntimeInternalObjectsField(klass);
1396 if (klass->IsStringClass() || add_internal_runtime_objects) {
1397 __ AddU2((uint16_t)iFieldCount + 1);
1398 } else {
1399 __ AddU2((uint16_t)iFieldCount);
1400 }
1401 for (int i = 0; i < iFieldCount; ++i) {
1402 ArtField* f = klass->GetInstanceField(i);
1403 __ AddStringId(LookupStringId(f->GetName()));
1404 HprofBasicType t = SignatureToBasicTypeAndSize(f->GetTypeDescriptor(), nullptr);
1405 __ AddU1(t);
1406 }
1407 // Add native value character array for strings / byte array for compressed strings.
1408 if (klass->IsStringClass()) {
1409 __ AddStringId(LookupStringId("value"));
1410 __ AddU1(hprof_basic_object);
1411 } else if (add_internal_runtime_objects) {
1412 __ AddStringId(LookupStringId("runtimeInternalObjects"));
1413 __ AddU1(hprof_basic_object);
1414 }
1415 }
1416
void Hprof::DumpFakeObjectArray(mirror::Object* obj, const std::set<mirror::Object*>& elements) {
1418 __ AddU1(HPROF_OBJECT_ARRAY_DUMP);
1419 __ AddObjectId(obj);
1420 __ AddStackTraceSerialNumber(LookupStackTraceSerialNumber(obj));
1421 __ AddU4(elements.size());
1422 __ AddClassId(LookupClassId(GetClassRoot<mirror::ObjectArray<mirror::Object>>().Ptr()));
1423 for (mirror::Object* e : elements) {
1424 __ AddObjectId(e);
1425 }
1426 }
1427
void Hprof::DumpHeapArray(mirror::Array* obj, mirror::Class* klass) {
1429 uint32_t length = obj->GetLength();
1430
1431 if (obj->IsObjectArray()) {
1432 // obj is an object array.
1433 __ AddU1(HPROF_OBJECT_ARRAY_DUMP);
1434
1435 __ AddObjectId(obj);
1436 __ AddStackTraceSerialNumber(LookupStackTraceSerialNumber(obj));
1437 __ AddU4(length);
1438 __ AddClassId(LookupClassId(klass));
1439
1440 // Dump the elements, which are always objects or null.
1441 __ AddIdList(obj->AsObjectArray<mirror::Object>().Ptr());
1442 } else {
1443 size_t size;
1444 HprofBasicType t = SignatureToBasicTypeAndSize(
1445 Primitive::Descriptor(klass->GetComponentType()->GetPrimitiveType()), &size);
1446
1447 // obj is a primitive array.
1448 __ AddU1(HPROF_PRIMITIVE_ARRAY_DUMP);
1449
1450 __ AddObjectId(obj);
1451 __ AddStackTraceSerialNumber(LookupStackTraceSerialNumber(obj));
1452 __ AddU4(length);
1453 __ AddU1(t);
1454
1455 // Dump the raw, packed element values.
1456 if (size == 1) {
1457 __ AddU1List(reinterpret_cast<const uint8_t*>(obj->GetRawData(sizeof(uint8_t), 0)), length);
1458 } else if (size == 2) {
1459 __ AddU2List(reinterpret_cast<const uint16_t*>(obj->GetRawData(sizeof(uint16_t), 0)), length);
1460 } else if (size == 4) {
1461 __ AddU4List(reinterpret_cast<const uint32_t*>(obj->GetRawData(sizeof(uint32_t), 0)), length);
1462 } else if (size == 8) {
1463 __ AddU8List(reinterpret_cast<const uint64_t*>(obj->GetRawData(sizeof(uint64_t), 0)), length);
1464 }
1465 }
1466 }
1467
void Hprof::DumpHeapInstanceObject(mirror::Object* obj,
                                   mirror::Class* klass,
                                   const std::set<mirror::Object*>& fake_roots) {
1471 // obj is an instance object.
1472 __ AddU1(HPROF_INSTANCE_DUMP);
1473 __ AddObjectId(obj);
1474 __ AddStackTraceSerialNumber(LookupStackTraceSerialNumber(obj));
1475 __ AddClassId(LookupClassId(klass));
1476
1477 // Reserve some space for the length of the instance data, which we won't
1478 // know until we're done writing it.
1479 size_t size_patch_offset = output_->Length();
1480 __ AddU4(0x77777777);
1481
1482 // What we will use for the string value if the object is a string.
1483 mirror::Object* string_value = nullptr;
1484 mirror::Object* fake_object_array = nullptr;
1485
1486 // Write the instance data; fields for this class, followed by super class fields, and so on.
1487 do {
1488 const size_t instance_fields = klass->NumInstanceFields();
1489 for (size_t i = 0; i < instance_fields; ++i) {
1490 ArtField* f = klass->GetInstanceField(i);
1491 size_t size;
1492 HprofBasicType t = SignatureToBasicTypeAndSize(f->GetTypeDescriptor(), &size);
1493 switch (t) {
1494 case hprof_basic_byte:
1495 __ AddU1(f->GetByte(obj));
1496 break;
1497 case hprof_basic_boolean:
1498 __ AddU1(f->GetBoolean(obj));
1499 break;
1500 case hprof_basic_char:
1501 __ AddU2(f->GetChar(obj));
1502 break;
1503 case hprof_basic_short:
1504 __ AddU2(f->GetShort(obj));
1505 break;
1506 case hprof_basic_int:
1507 if (mirror::kUseStringCompression &&
1508 klass->IsStringClass() &&
1509 f->GetOffset().SizeValue() == mirror::String::CountOffset().SizeValue()) {
1510 // Store the string length instead of the raw count field with compression flag.
1511 __ AddU4(obj->AsString()->GetLength());
1512 break;
1513 }
1514 FALLTHROUGH_INTENDED;
1515 case hprof_basic_float:
1516 case hprof_basic_object:
1517 __ AddU4(f->Get32(obj));
1518 break;
1519 case hprof_basic_double:
1520 case hprof_basic_long:
1521 __ AddU8(f->Get64(obj));
1522 break;
1523 }
1524 }
1525 // Add value field for String if necessary.
1526 if (klass->IsStringClass()) {
1527 ObjPtr<mirror::String> s = obj->AsString();
1528 if (s->GetLength() == 0) {
1529 // If string is empty, use an object-aligned address within the string for the value.
1530 string_value = reinterpret_cast<mirror::Object*>(
1531 reinterpret_cast<uintptr_t>(s.Ptr()) + kObjectAlignment);
1532 } else {
1533 if (s->IsCompressed()) {
1534 string_value = reinterpret_cast<mirror::Object*>(s->GetValueCompressed());
1535 } else {
1536 string_value = reinterpret_cast<mirror::Object*>(s->GetValue());
1537 }
1538 }
1539 __ AddObjectId(string_value);
1540 } else if (AddRuntimeInternalObjectsField(klass)) {
1541 // We need an id that is guaranteed to not be used, use 1/2 of the object alignment.
1542 fake_object_array = reinterpret_cast<mirror::Object*>(
1543 reinterpret_cast<uintptr_t>(obj) + kObjectAlignment / 2);
1544 __ AddObjectId(fake_object_array);
1545 }
1546 klass = klass->GetSuperClass().Ptr();
1547 } while (klass != nullptr);
1548
1549 // Patch the instance field length.
1550 __ UpdateU4(size_patch_offset, output_->Length() - (size_patch_offset + 4));
1551
1552 // Output native value character array for strings.
1553 CHECK_EQ(obj->IsString(), string_value != nullptr);
1554 if (string_value != nullptr) {
1555 ObjPtr<mirror::String> s = obj->AsString();
1556 __ AddU1(HPROF_PRIMITIVE_ARRAY_DUMP);
1557 __ AddObjectId(string_value);
1558 __ AddStackTraceSerialNumber(LookupStackTraceSerialNumber(obj));
1559 __ AddU4(s->GetLength());
1560 if (s->IsCompressed()) {
1561 __ AddU1(hprof_basic_byte);
1562 __ AddU1List(s->GetValueCompressed(), s->GetLength());
1563 } else {
1564 __ AddU1(hprof_basic_char);
1565 __ AddU2List(s->GetValue(), s->GetLength());
1566 }
1567 } else if (fake_object_array != nullptr) {
1568 DumpFakeObjectArray(fake_object_array, fake_roots);
1569 }
1570 }
1571
void Hprof::VisitRoot(mirror::Object* obj, const RootInfo& info) {
1573 static const HprofHeapTag xlate[] = {
1574 HPROF_ROOT_UNKNOWN,
1575 HPROF_ROOT_JNI_GLOBAL,
1576 HPROF_ROOT_JNI_LOCAL,
1577 HPROF_ROOT_JAVA_FRAME,
1578 HPROF_ROOT_NATIVE_STACK,
1579 HPROF_ROOT_STICKY_CLASS,
1580 HPROF_ROOT_THREAD_BLOCK,
1581 HPROF_ROOT_MONITOR_USED,
1582 HPROF_ROOT_THREAD_OBJECT,
1583 HPROF_ROOT_INTERNED_STRING,
1584 HPROF_ROOT_FINALIZING,
1585 HPROF_ROOT_DEBUGGER,
1586 HPROF_ROOT_REFERENCE_CLEANUP,
1587 HPROF_ROOT_VM_INTERNAL,
1588 HPROF_ROOT_JNI_MONITOR,
1589 };
1590 CHECK_LT(info.GetType(), sizeof(xlate) / sizeof(HprofHeapTag));
1591 if (obj == nullptr) {
1592 return;
1593 }
1594 MarkRootObject(obj, nullptr, xlate[info.GetType()], info.GetThreadId());
1595 }
1596
1597 // If "direct_to_ddms" is true, the other arguments are ignored, and data is
1598 // sent directly to DDMS.
1599 // If "fd" is >= 0, the output will be written to that file descriptor.
1600 // Otherwise, "filename" is used to create an output file.
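//
// Illustrative call only (the path and argument values are hypothetical):
//   hprof::DumpHeap("/data/local/tmp/app.hprof", /*fd=*/ -1, /*direct_to_ddms=*/ false);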
void DumpHeap(const char* filename, int fd, bool direct_to_ddms) {
1602 CHECK(filename != nullptr);
1603 Thread* self = Thread::Current();
1604 // Need to take a heap dump while GC isn't running. See the comment in Heap::VisitObjects().
1605 // Also we need the critical section to avoid visiting the same object twice. See b/34967844
1606 gc::ScopedGCCriticalSection gcs(self,
1607 gc::kGcCauseHprof,
1608 gc::kCollectorTypeHprof);
1609 ScopedSuspendAll ssa(__FUNCTION__, true /* long suspend */);
1610 Hprof hprof(filename, fd, direct_to_ddms);
1611 hprof.Dump();
1612 }
1613
1614 } // namespace hprof
1615 } // namespace art
1616