/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * Preparation and completion of hprof data generation. The output is
 * written into two files and then combined. This is necessary because
 * we generate some of the data (strings and classes) while we dump the
 * heap, and some analysis tools require that the class and string data
 * appear first.
 */

#include "hprof.h"

#include <cutils/open_memstream.h>
#include <errno.h>
#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <sys/time.h>
#include <sys/uio.h>
#include <time.h>
#include <unistd.h>

#include <set>

#include "art_field-inl.h"
#include "base/logging.h"
#include "base/stringprintf.h"
#include "base/time_utils.h"
#include "base/unix_file/fd_file.h"
#include "class_linker.h"
#include "common_throws.h"
#include "debugger.h"
#include "dex_file-inl.h"
#include "gc_root.h"
#include "gc/accounting/heap_bitmap.h"
#include "gc/heap.h"
#include "gc/space/space.h"
#include "globals.h"
#include "jdwp/jdwp.h"
#include "jdwp/jdwp_priv.h"
#include "mirror/class.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "os.h"
#include "safe_map.h"
#include "scoped_thread_state_change.h"
#include "thread_list.h"

namespace art {

namespace hprof {

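// If true, a DDMS-targeted heap dump is streamed straight to the JDWP socket via
// DumpToDdmsDirect(); the buffered alternative (DumpToDdmsBuffered) is not implemented.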
static constexpr bool kDirectStream = true;

static constexpr uint32_t kHprofTime = 0;
static constexpr uint32_t kHprofNullStackTrace = 0;
static constexpr uint32_t kHprofNullThread = 0;

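// Heap dump data is emitted as a series of HEAP_DUMP_SEGMENT records; a new segment is started
// once either of the limits below is reached (see CheckHeapSegmentConstraints()).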
static constexpr size_t kMaxObjectsPerSegment = 128;
static constexpr size_t kMaxBytesPerSegment = 4096;

// The static field-name for the synthetic object generated to account for class static overhead.
static constexpr const char* kStaticOverheadName = "$staticOverhead";

enum HprofTag {
  HPROF_TAG_STRING = 0x01,
  HPROF_TAG_LOAD_CLASS = 0x02,
  HPROF_TAG_UNLOAD_CLASS = 0x03,
  HPROF_TAG_STACK_FRAME = 0x04,
  HPROF_TAG_STACK_TRACE = 0x05,
  HPROF_TAG_ALLOC_SITES = 0x06,
  HPROF_TAG_HEAP_SUMMARY = 0x07,
  HPROF_TAG_START_THREAD = 0x0A,
  HPROF_TAG_END_THREAD = 0x0B,
  HPROF_TAG_HEAP_DUMP = 0x0C,
  HPROF_TAG_HEAP_DUMP_SEGMENT = 0x1C,
  HPROF_TAG_HEAP_DUMP_END = 0x2C,
  HPROF_TAG_CPU_SAMPLES = 0x0D,
  HPROF_TAG_CONTROL_SETTINGS = 0x0E,
};

// Values for the first byte of HEAP_DUMP and HEAP_DUMP_SEGMENT records:
enum HprofHeapTag {
  // Traditional.
  HPROF_ROOT_UNKNOWN = 0xFF,
  HPROF_ROOT_JNI_GLOBAL = 0x01,
  HPROF_ROOT_JNI_LOCAL = 0x02,
  HPROF_ROOT_JAVA_FRAME = 0x03,
  HPROF_ROOT_NATIVE_STACK = 0x04,
  HPROF_ROOT_STICKY_CLASS = 0x05,
  HPROF_ROOT_THREAD_BLOCK = 0x06,
  HPROF_ROOT_MONITOR_USED = 0x07,
  HPROF_ROOT_THREAD_OBJECT = 0x08,
  HPROF_CLASS_DUMP = 0x20,
  HPROF_INSTANCE_DUMP = 0x21,
  HPROF_OBJECT_ARRAY_DUMP = 0x22,
  HPROF_PRIMITIVE_ARRAY_DUMP = 0x23,

  // Android.
  HPROF_HEAP_DUMP_INFO = 0xfe,
  HPROF_ROOT_INTERNED_STRING = 0x89,
  HPROF_ROOT_FINALIZING = 0x8a,  // Obsolete.
  HPROF_ROOT_DEBUGGER = 0x8b,
  HPROF_ROOT_REFERENCE_CLEANUP = 0x8c,  // Obsolete.
  HPROF_ROOT_VM_INTERNAL = 0x8d,
  HPROF_ROOT_JNI_MONITOR = 0x8e,
  HPROF_UNREACHABLE = 0x90,  // Obsolete.
  HPROF_PRIMITIVE_ARRAY_NODATA_DUMP = 0xc3,  // Obsolete.
};

enum HprofHeapId {
  HPROF_HEAP_DEFAULT = 0,
  HPROF_HEAP_ZYGOTE = 'Z',
  HPROF_HEAP_APP = 'A',
  HPROF_HEAP_IMAGE = 'I',
};

enum HprofBasicType {
  hprof_basic_object = 2,
  hprof_basic_boolean = 4,
  hprof_basic_char = 5,
  hprof_basic_float = 6,
  hprof_basic_double = 7,
  hprof_basic_byte = 8,
  hprof_basic_short = 9,
  hprof_basic_int = 10,
  hprof_basic_long = 11,
};

typedef uint32_t HprofStringId;
typedef uint32_t HprofClassObjectId;

class EndianOutput {
 public:
  EndianOutput() : length_(0), sum_length_(0), max_length_(0), started_(false) {}
  virtual ~EndianOutput() {}

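  // Every top-level hprof record is laid out as: U1 tag, U4 timestamp, U4 body length, followed
  // by the body bytes. The length field is written as a placeholder here and back-patched in
  // EndRecord() once the body size is known.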
  void StartNewRecord(uint8_t tag, uint32_t time) {
    if (length_ > 0) {
      EndRecord();
    }
    DCHECK_EQ(length_, 0U);
    AddU1(tag);
    AddU4(time);
    AddU4(0xdeaddead);  // Length, replaced on flush.
    started_ = true;
  }

  void EndRecord() {
    // Replace length in header.
    if (started_) {
      UpdateU4(sizeof(uint8_t) + sizeof(uint32_t),
               length_ - sizeof(uint8_t) - 2 * sizeof(uint32_t));
    }

    HandleEndRecord();

    sum_length_ += length_;
    max_length_ = std::max(max_length_, length_);
    length_ = 0;
    started_ = false;
  }

  void AddU1(uint8_t value) {
    AddU1List(&value, 1);
  }
  void AddU2(uint16_t value) {
    AddU2List(&value, 1);
  }
  void AddU4(uint32_t value) {
    AddU4List(&value, 1);
  }

  void AddU8(uint64_t value) {
    AddU8List(&value, 1);
  }

  void AddObjectId(const mirror::Object* value) {
    AddU4(PointerToLowMemUInt32(value));
  }

  // The ID for the synthetic object generated to account for class static overhead.
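  // Objects are at least kObjectAlignment-aligned, so setting the low bit should keep this ID
  // distinct from any ID formed directly from an object address.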
  void AddClassStaticsId(const mirror::Class* value) {
    AddU4(1 | PointerToLowMemUInt32(value));
  }

  void AddJniGlobalRefId(jobject value) {
    AddU4(PointerToLowMemUInt32(value));
  }

  void AddClassId(HprofClassObjectId value) {
    AddU4(value);
  }

  void AddStringId(HprofStringId value) {
    AddU4(value);
  }

  void AddU1List(const uint8_t* values, size_t count) {
    HandleU1List(values, count);
    length_ += count;
  }
  void AddU2List(const uint16_t* values, size_t count) {
    HandleU2List(values, count);
    length_ += count * sizeof(uint16_t);
  }
  void AddU4List(const uint32_t* values, size_t count) {
    HandleU4List(values, count);
    length_ += count * sizeof(uint32_t);
  }
  virtual void UpdateU4(size_t offset, uint32_t new_value ATTRIBUTE_UNUSED) {
    DCHECK_LE(offset, length_ - 4);
  }
  void AddU8List(const uint64_t* values, size_t count) {
    HandleU8List(values, count);
    length_ += count * sizeof(uint64_t);
  }

  void AddIdList(mirror::ObjectArray<mirror::Object>* values)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    const int32_t length = values->GetLength();
    for (int32_t i = 0; i < length; ++i) {
      AddObjectId(values->GetWithoutChecks(i));
    }
  }

  void AddUtf8String(const char* str) {
    // The terminating NUL character is NOT written.
    AddU1List((const uint8_t*)str, strlen(str));
  }

  size_t Length() const {
    return length_;
  }

  size_t SumLength() const {
    return sum_length_;
  }

  size_t MaxLength() const {
    return max_length_;
  }

 protected:
  virtual void HandleU1List(const uint8_t* values ATTRIBUTE_UNUSED,
                            size_t count ATTRIBUTE_UNUSED) {
  }
  virtual void HandleU2List(const uint16_t* values ATTRIBUTE_UNUSED,
                            size_t count ATTRIBUTE_UNUSED) {
  }
  virtual void HandleU4List(const uint32_t* values ATTRIBUTE_UNUSED,
                            size_t count ATTRIBUTE_UNUSED) {
  }
  virtual void HandleU8List(const uint64_t* values ATTRIBUTE_UNUSED,
                            size_t count ATTRIBUTE_UNUSED) {
  }
  virtual void HandleEndRecord() {
  }

  size_t length_;      // Current record size.
  size_t sum_length_;  // Size of all data.
  size_t max_length_;  // Maximum seen length.
  bool started_;       // Was StartNewRecord called?
};

// This keeps things buffered until flushed.
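// Multi-byte values are serialized most-significant byte first (big-endian), which is the byte
// order the hprof format uses; see the Handle*List overrides below.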
class EndianOutputBuffered : public EndianOutput {
 public:
  explicit EndianOutputBuffered(size_t reserve_size) {
    buffer_.reserve(reserve_size);
  }
  virtual ~EndianOutputBuffered() {}

  void UpdateU4(size_t offset, uint32_t new_value) OVERRIDE {
    DCHECK_LE(offset, length_ - 4);
    buffer_[offset + 0] = static_cast<uint8_t>((new_value >> 24) & 0xFF);
    buffer_[offset + 1] = static_cast<uint8_t>((new_value >> 16) & 0xFF);
    buffer_[offset + 2] = static_cast<uint8_t>((new_value >> 8) & 0xFF);
    buffer_[offset + 3] = static_cast<uint8_t>((new_value >> 0) & 0xFF);
  }

 protected:
  void HandleU1List(const uint8_t* values, size_t count) OVERRIDE {
    DCHECK_EQ(length_, buffer_.size());
    buffer_.insert(buffer_.end(), values, values + count);
  }

  void HandleU2List(const uint16_t* values, size_t count) OVERRIDE {
    DCHECK_EQ(length_, buffer_.size());
    for (size_t i = 0; i < count; ++i) {
      uint16_t value = *values;
      buffer_.push_back(static_cast<uint8_t>((value >> 8) & 0xFF));
      buffer_.push_back(static_cast<uint8_t>((value >> 0) & 0xFF));
      values++;
    }
  }

  void HandleU4List(const uint32_t* values, size_t count) OVERRIDE {
    DCHECK_EQ(length_, buffer_.size());
    for (size_t i = 0; i < count; ++i) {
      uint32_t value = *values;
      buffer_.push_back(static_cast<uint8_t>((value >> 24) & 0xFF));
      buffer_.push_back(static_cast<uint8_t>((value >> 16) & 0xFF));
      buffer_.push_back(static_cast<uint8_t>((value >> 8) & 0xFF));
      buffer_.push_back(static_cast<uint8_t>((value >> 0) & 0xFF));
      values++;
    }
  }

  void HandleU8List(const uint64_t* values, size_t count) OVERRIDE {
    DCHECK_EQ(length_, buffer_.size());
    for (size_t i = 0; i < count; ++i) {
      uint64_t value = *values;
      buffer_.push_back(static_cast<uint8_t>((value >> 56) & 0xFF));
      buffer_.push_back(static_cast<uint8_t>((value >> 48) & 0xFF));
      buffer_.push_back(static_cast<uint8_t>((value >> 40) & 0xFF));
      buffer_.push_back(static_cast<uint8_t>((value >> 32) & 0xFF));
      buffer_.push_back(static_cast<uint8_t>((value >> 24) & 0xFF));
      buffer_.push_back(static_cast<uint8_t>((value >> 16) & 0xFF));
      buffer_.push_back(static_cast<uint8_t>((value >> 8) & 0xFF));
      buffer_.push_back(static_cast<uint8_t>((value >> 0) & 0xFF));
      values++;
    }
  }

  void HandleEndRecord() OVERRIDE {
    DCHECK_EQ(buffer_.size(), length_);
    if (kIsDebugBuild && started_) {
      uint32_t stored_length =
          static_cast<uint32_t>(buffer_[5]) << 24 |
          static_cast<uint32_t>(buffer_[6]) << 16 |
          static_cast<uint32_t>(buffer_[7]) << 8 |
          static_cast<uint32_t>(buffer_[8]);
      DCHECK_EQ(stored_length, length_ - sizeof(uint8_t) - 2 * sizeof(uint32_t));
    }
    HandleFlush(buffer_.data(), length_);
    buffer_.clear();
  }

  virtual void HandleFlush(const uint8_t* buffer ATTRIBUTE_UNUSED, size_t length ATTRIBUTE_UNUSED) {
  }

  std::vector<uint8_t> buffer_;
};

class FileEndianOutput FINAL : public EndianOutputBuffered {
 public:
  FileEndianOutput(File* fp, size_t reserved_size)
      : EndianOutputBuffered(reserved_size), fp_(fp), errors_(false) {
    DCHECK(fp != nullptr);
  }
  ~FileEndianOutput() {
  }

  bool Errors() {
    return errors_;
  }

 protected:
  void HandleFlush(const uint8_t* buffer, size_t length) OVERRIDE {
    if (!errors_) {
      errors_ = !fp_->WriteFully(buffer, length);
    }
  }

 private:
  File* fp_;
  bool errors_;
};

class NetStateEndianOutput FINAL : public EndianOutputBuffered {
 public:
  NetStateEndianOutput(JDWP::JdwpNetStateBase* net_state, size_t reserved_size)
      : EndianOutputBuffered(reserved_size), net_state_(net_state) {
    DCHECK(net_state != nullptr);
  }
  ~NetStateEndianOutput() {}

 protected:
  void HandleFlush(const uint8_t* buffer, size_t length) OVERRIDE {
    std::vector<iovec> iov;
    iov.push_back(iovec());
    iov[0].iov_base = const_cast<void*>(reinterpret_cast<const void*>(buffer));
    iov[0].iov_len = length;
    net_state_->WriteBufferedPacketLocked(iov);
  }

 private:
  JDWP::JdwpNetStateBase* net_state_;
};

#define __ output_->

class Hprof : public SingleRootVisitor {
 public:
  Hprof(const char* output_filename, int fd, bool direct_to_ddms)
      : filename_(output_filename),
        fd_(fd),
        direct_to_ddms_(direct_to_ddms),
        start_ns_(NanoTime()),
        current_heap_(HPROF_HEAP_DEFAULT),
        objects_in_segment_(0),
        next_string_id_(0x400000) {
    LOG(INFO) << "hprof: heap dump \"" << filename_ << "\" starting...";
  }

  void Dump()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::heap_bitmap_lock_) {
    // First pass to measure the size of the dump.
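    // The base EndianOutput used for this pass only accumulates lengths and discards the bytes;
    // the totals are needed up front, e.g. so the DDMS chunk header can carry the overall size
    // before the real data is streamed.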
    size_t overall_size;
    size_t max_length;
    {
      EndianOutput count_output;
      output_ = &count_output;
      ProcessHeap(false);
      overall_size = count_output.SumLength();
      max_length = count_output.MaxLength();
      output_ = nullptr;
    }

    bool okay;
    if (direct_to_ddms_) {
      if (kDirectStream) {
        okay = DumpToDdmsDirect(overall_size, max_length, CHUNK_TYPE("HPDS"));
      } else {
        okay = DumpToDdmsBuffered(overall_size, max_length);
      }
    } else {
      okay = DumpToFile(overall_size, max_length);
    }

    if (okay) {
      uint64_t duration = NanoTime() - start_ns_;
      LOG(INFO) << "hprof: heap dump completed ("
                << PrettySize(RoundUp(overall_size, 1024))
                << ") in " << PrettyDuration(duration);
    }
  }

 private:
  static void VisitObjectCallback(mirror::Object* obj, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK(obj != nullptr);
    DCHECK(arg != nullptr);
    reinterpret_cast<Hprof*>(arg)->DumpHeapObject(obj);
  }

  void DumpHeapObject(mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void DumpHeapClass(mirror::Class* klass)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void DumpHeapArray(mirror::Array* obj, mirror::Class* klass)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void DumpHeapInstanceObject(mirror::Object* obj, mirror::Class* klass)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ProcessHeap(bool header_first)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_) {
    // Reset current heap and object count.
    current_heap_ = HPROF_HEAP_DEFAULT;
    objects_in_segment_ = 0;

    if (header_first) {
      ProcessHeader();
      ProcessBody();
    } else {
      ProcessBody();
      ProcessHeader();
    }
  }

  void ProcessBody() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_) {
    Runtime* const runtime = Runtime::Current();
    // Walk the roots and the heap.
    output_->StartNewRecord(HPROF_TAG_HEAP_DUMP_SEGMENT, kHprofTime);

    runtime->VisitRoots(this);
    runtime->VisitImageRoots(this);
    runtime->GetHeap()->VisitObjectsPaused(VisitObjectCallback, this);

    output_->StartNewRecord(HPROF_TAG_HEAP_DUMP_END, kHprofTime);
    output_->EndRecord();
  }

  void ProcessHeader() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_) {
    // Write the header.
    WriteFixedHeader();
    // Write the string and class tables, and any stack traces, to the header.
    // (jhat requires that these appear before any of the data in the body that refers to them.)
    WriteStringTable();
    WriteClassTable();
    WriteStackTraces();
    output_->EndRecord();
  }

  void WriteClassTable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    uint32_t nextSerialNumber = 1;

    for (mirror::Class* c : classes_) {
      CHECK(c != nullptr);
      output_->StartNewRecord(HPROF_TAG_LOAD_CLASS, kHprofTime);
      // LOAD CLASS format:
      // U4: class serial number (always > 0)
      // ID: class object ID. We use the address of the class object structure as its ID.
      // U4: stack trace serial number
      // ID: class name string ID
      __ AddU4(nextSerialNumber++);
      __ AddObjectId(c);
      __ AddU4(kHprofNullStackTrace);
      __ AddStringId(LookupClassNameId(c));
    }
  }

  void WriteStringTable() {
    for (const std::pair<std::string, HprofStringId>& p : strings_) {
      const std::string& string = p.first;
      const size_t id = p.second;

      output_->StartNewRecord(HPROF_TAG_STRING, kHprofTime);

      // STRING format:
      // ID: ID for this string
      // U1*: UTF8 characters for string (NOT null terminated)
      // (the record format encodes the length)
      __ AddU4(id);
      __ AddUtf8String(string.c_str());
    }
  }

  void StartNewHeapDumpSegment() {
    // This flushes the old segment and starts a new one.
    output_->StartNewRecord(HPROF_TAG_HEAP_DUMP_SEGMENT, kHprofTime);
    objects_in_segment_ = 0;
    // Starting a new HEAP_DUMP resets the heap to default.
    current_heap_ = HPROF_HEAP_DEFAULT;
  }

  void CheckHeapSegmentConstraints() {
    if (objects_in_segment_ >= kMaxObjectsPerSegment || output_->Length() >= kMaxBytesPerSegment) {
      StartNewHeapDumpSegment();
    }
  }

  void VisitRoot(mirror::Object* obj, const RootInfo& root_info)
      OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  void MarkRootObject(const mirror::Object* obj, jobject jni_obj, HprofHeapTag heap_tag,
                      uint32_t thread_serial);

  HprofClassObjectId LookupClassId(mirror::Class* c) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    if (c != nullptr) {
      auto result = classes_.insert(c);
      const mirror::Class* present = *result.first;
      CHECK_EQ(present, c);
      // Make sure that we've assigned a string ID for this class' name
      LookupClassNameId(c);
    }
    return PointerToLowMemUInt32(c);
  }

  HprofStringId LookupStringId(mirror::String* string) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return LookupStringId(string->ToModifiedUtf8());
  }

  HprofStringId LookupStringId(const char* string) {
    return LookupStringId(std::string(string));
  }

  HprofStringId LookupStringId(const std::string& string) {
    auto it = strings_.find(string);
    if (it != strings_.end()) {
      return it->second;
    }
    HprofStringId id = next_string_id_++;
    strings_.Put(string, id);
    return id;
  }

  HprofStringId LookupClassNameId(mirror::Class* c) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return LookupStringId(PrettyDescriptor(c));
  }

  void WriteFixedHeader() {
    // Write the file header.
    // U1: NUL-terminated magic string.
    const char magic[] = "JAVA PROFILE 1.0.3";
    __ AddU1List(reinterpret_cast<const uint8_t*>(magic), sizeof(magic));

    // U4: size of identifiers. We're using addresses as IDs and our heap references are stored
    // as uint32_t.
    // Note of warning: hprof-conv hard-codes the size of identifiers to 4.
    static_assert(sizeof(mirror::HeapReference<mirror::Object>) == sizeof(uint32_t),
                  "Unexpected HeapReference size");
    __ AddU4(sizeof(uint32_t));

    // The current time, in milliseconds since 0:00 GMT, 1/1/70.
    timeval now;
    const uint64_t nowMs = (gettimeofday(&now, nullptr) < 0) ? 0 :
        (uint64_t)now.tv_sec * 1000 + now.tv_usec / 1000;
    // TODO: It seems it would be correct to use U8.
    // U4: high word of the 64-bit time.
    __ AddU4(static_cast<uint32_t>(nowMs >> 32));
    // U4: low word of the 64-bit time.
    __ AddU4(static_cast<uint32_t>(nowMs & 0xFFFFFFFF));
  }

  void WriteStackTraces() {
    // Write a dummy stack trace record so the analysis tools don't freak out.
    output_->StartNewRecord(HPROF_TAG_STACK_TRACE, kHprofTime);
    __ AddU4(kHprofNullStackTrace);
    __ AddU4(kHprofNullThread);
    __ AddU4(0);  // no frames
  }

  bool DumpToDdmsBuffered(size_t overall_size ATTRIBUTE_UNUSED, size_t max_length ATTRIBUTE_UNUSED)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_) {
    LOG(FATAL) << "Unimplemented";
    UNREACHABLE();
    // // Send the data off to DDMS.
    // iovec iov[2];
    // iov[0].iov_base = header_data_ptr_;
    // iov[0].iov_len = header_data_size_;
    // iov[1].iov_base = body_data_ptr_;
    // iov[1].iov_len = body_data_size_;
    // Dbg::DdmSendChunkV(CHUNK_TYPE("HPDS"), iov, 2);
  }

  bool DumpToFile(size_t overall_size, size_t max_length)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_) {
    // Where exactly are we writing to?
    int out_fd;
    if (fd_ >= 0) {
      out_fd = dup(fd_);
      if (out_fd < 0) {
        ThrowRuntimeException("Couldn't dump heap; dup(%d) failed: %s", fd_, strerror(errno));
        return false;
      }
    } else {
      out_fd = open(filename_.c_str(), O_WRONLY|O_CREAT|O_TRUNC, 0644);
      if (out_fd < 0) {
        ThrowRuntimeException("Couldn't dump heap; open(\"%s\") failed: %s", filename_.c_str(),
                              strerror(errno));
        return false;
      }
    }

    std::unique_ptr<File> file(new File(out_fd, filename_, true));
    bool okay;
    {
      FileEndianOutput file_output(file.get(), max_length);
      output_ = &file_output;
      ProcessHeap(true);
      okay = !file_output.Errors();

      if (okay) {
        // Check for expected size: the output is expected to be less than or equal to the
        // first-pass estimate, see b/23521263.
        DCHECK_LE(file_output.SumLength(), overall_size);
      }
      output_ = nullptr;
    }

    if (okay) {
      okay = file->FlushCloseOrErase() == 0;
    } else {
      file->Erase();
    }
    if (!okay) {
      std::string msg(StringPrintf("Couldn't dump heap; writing \"%s\" failed: %s",
                                   filename_.c_str(), strerror(errno)));
      ThrowRuntimeException("%s", msg.c_str());
      LOG(ERROR) << msg;
    }

    return okay;
  }

  bool DumpToDdmsDirect(size_t overall_size, size_t max_length, uint32_t chunk_type)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_) {
    CHECK(direct_to_ddms_);
    JDWP::JdwpState* state = Dbg::GetJdwpState();
    CHECK(state != nullptr);
    JDWP::JdwpNetStateBase* net_state = state->netState;
    CHECK(net_state != nullptr);

    // Hold the socket lock for the whole time since we want this to be atomic.
    MutexLock mu(Thread::Current(), *net_state->GetSocketLock());

    // Prepare the Ddms chunk.
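    // The chunk header is the JDWP packet header followed by (presumably) 4 bytes of DDMS chunk
    // type and 4 bytes of chunk length, hence the extra 8 bytes.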
    constexpr size_t kChunkHeaderSize = kJDWPHeaderLen + 8;
    uint8_t chunk_header[kChunkHeaderSize] = { 0 };
    state->SetupChunkHeader(chunk_type, overall_size, kChunkHeaderSize, chunk_header);

    // Prepare the output and send the chunk header.
    NetStateEndianOutput net_output(net_state, max_length);
    output_ = &net_output;
    net_output.AddU1List(chunk_header, kChunkHeaderSize);

    // Write the dump.
    ProcessHeap(true);

    // Check for expected size. See DumpToFile for comment.
    DCHECK_LE(net_output.SumLength(), overall_size + kChunkHeaderSize);
    output_ = nullptr;

    return true;
  }

  // If direct_to_ddms_ is set, "filename_" and "fd" will be ignored.
  // Otherwise, "filename_" must be valid, though if "fd" >= 0 it will
  // only be used for debug messages.
  std::string filename_;
  int fd_;
  bool direct_to_ddms_;

  uint64_t start_ns_;

  EndianOutput* output_;

  HprofHeapId current_heap_;  // Which heap we're currently dumping.
  size_t objects_in_segment_;

  std::set<mirror::Class*> classes_;
  HprofStringId next_string_id_;
  SafeMap<std::string, HprofStringId> strings_;

  DISALLOW_COPY_AND_ASSIGN(Hprof);
};

static HprofBasicType SignatureToBasicTypeAndSize(const char* sig, size_t* size_out) {
  char c = sig[0];
  HprofBasicType ret;
  size_t size;

  switch (c) {
    case '[':
    case 'L':
      ret = hprof_basic_object;
      size = 4;
      break;
    case 'Z':
      ret = hprof_basic_boolean;
      size = 1;
      break;
    case 'C':
      ret = hprof_basic_char;
      size = 2;
      break;
    case 'F':
      ret = hprof_basic_float;
      size = 4;
      break;
    case 'D':
      ret = hprof_basic_double;
      size = 8;
      break;
    case 'B':
      ret = hprof_basic_byte;
      size = 1;
      break;
    case 'S':
      ret = hprof_basic_short;
      size = 2;
      break;
    case 'I':
      ret = hprof_basic_int;
      size = 4;
      break;
    case 'J':
      ret = hprof_basic_long;
      size = 8;
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }

  if (size_out != nullptr) {
    *size_out = size;
  }

  return ret;
}

// Called for each root object; only does something when heap_tag is non-zero, which is usually
// only the case when marking the root set or unreachable objects. Used to add root set
// references to obj.
void Hprof::MarkRootObject(const mirror::Object* obj, jobject jni_obj, HprofHeapTag heap_tag,
                           uint32_t thread_serial) {
  if (heap_tag == 0) {
    return;
  }

  CheckHeapSegmentConstraints();

  switch (heap_tag) {
    // ID: object ID
    case HPROF_ROOT_UNKNOWN:
    case HPROF_ROOT_STICKY_CLASS:
    case HPROF_ROOT_MONITOR_USED:
    case HPROF_ROOT_INTERNED_STRING:
    case HPROF_ROOT_DEBUGGER:
    case HPROF_ROOT_VM_INTERNAL:
      __ AddU1(heap_tag);
      __ AddObjectId(obj);
      break;

    // ID: object ID
    // ID: JNI global ref ID
    case HPROF_ROOT_JNI_GLOBAL:
      __ AddU1(heap_tag);
      __ AddObjectId(obj);
      __ AddJniGlobalRefId(jni_obj);
      break;

    // ID: object ID
    // U4: thread serial number
    // U4: frame number in stack trace (-1 for empty)
    case HPROF_ROOT_JNI_LOCAL:
    case HPROF_ROOT_JNI_MONITOR:
    case HPROF_ROOT_JAVA_FRAME:
      __ AddU1(heap_tag);
      __ AddObjectId(obj);
      __ AddU4(thread_serial);
      __ AddU4((uint32_t)-1);
      break;

    // ID: object ID
    // U4: thread serial number
    case HPROF_ROOT_NATIVE_STACK:
    case HPROF_ROOT_THREAD_BLOCK:
      __ AddU1(heap_tag);
      __ AddObjectId(obj);
      __ AddU4(thread_serial);
      break;

    // ID: thread object ID
    // U4: thread serial number
    // U4: stack trace serial number
    case HPROF_ROOT_THREAD_OBJECT:
      __ AddU1(heap_tag);
      __ AddObjectId(obj);
      __ AddU4(thread_serial);
      __ AddU4((uint32_t)-1);  // xxx
      break;

    case HPROF_CLASS_DUMP:
    case HPROF_INSTANCE_DUMP:
    case HPROF_OBJECT_ARRAY_DUMP:
    case HPROF_PRIMITIVE_ARRAY_DUMP:
    case HPROF_HEAP_DUMP_INFO:
    case HPROF_PRIMITIVE_ARRAY_NODATA_DUMP:
      // Ignored.
      break;

    case HPROF_ROOT_FINALIZING:
    case HPROF_ROOT_REFERENCE_CLEANUP:
    case HPROF_UNREACHABLE:
      LOG(FATAL) << "obsolete tag " << static_cast<int>(heap_tag);
      break;
  }

  ++objects_in_segment_;
}

static int StackTraceSerialNumber(const mirror::Object* /*obj*/) {
  return kHprofNullStackTrace;
}

void Hprof::DumpHeapObject(mirror::Object* obj) {
  // Ignore classes that are retired.
  if (obj->IsClass() && obj->AsClass()->IsRetired()) {
    return;
  }

  gc::Heap* const heap = Runtime::Current()->GetHeap();
  const gc::space::ContinuousSpace* const space = heap->FindContinuousSpaceFromObject(obj, true);
  HprofHeapId heap_type = HPROF_HEAP_APP;
  if (space != nullptr) {
    if (space->IsZygoteSpace()) {
      heap_type = HPROF_HEAP_ZYGOTE;
    } else if (space->IsImageSpace()) {
      heap_type = HPROF_HEAP_IMAGE;
    }
  } else {
    const auto* los = heap->GetLargeObjectsSpace();
    if (los->Contains(obj) && los->IsZygoteLargeObject(Thread::Current(), obj)) {
      heap_type = HPROF_HEAP_ZYGOTE;
    }
  }
  CheckHeapSegmentConstraints();

  if (heap_type != current_heap_) {
    HprofStringId nameId;

    // This object is in a different heap than the current one.
    // Emit a HEAP_DUMP_INFO tag to change heaps.
    __ AddU1(HPROF_HEAP_DUMP_INFO);
    __ AddU4(static_cast<uint32_t>(heap_type));  // uint32_t: heap type
    switch (heap_type) {
      case HPROF_HEAP_APP:
        nameId = LookupStringId("app");
        break;
      case HPROF_HEAP_ZYGOTE:
        nameId = LookupStringId("zygote");
        break;
      case HPROF_HEAP_IMAGE:
        nameId = LookupStringId("image");
        break;
      default:
        // Internal error
        LOG(ERROR) << "Unexpected desiredHeap";
        nameId = LookupStringId("<ILLEGAL>");
        break;
    }
    __ AddStringId(nameId);
    current_heap_ = heap_type;
  }

  mirror::Class* c = obj->GetClass();
  if (c == nullptr) {
    // This object will bother HprofReader, because it has a null
    // class, so just don't dump it. It could be
    // gDvm.unlinkedJavaLangClass or it could be an object just
    // allocated which hasn't been initialized yet.
  } else {
    if (obj->IsClass()) {
      DumpHeapClass(obj->AsClass());
    } else if (c->IsArrayClass()) {
      DumpHeapArray(obj->AsArray(), c);
    } else {
      DumpHeapInstanceObject(obj, c);
    }
  }

  ++objects_in_segment_;
}

void Hprof::DumpHeapClass(mirror::Class* klass) {
  if (!klass->IsLoaded() && !klass->IsErroneous()) {
    // Class is allocated but not yet loaded: we cannot access its fields or super class.
    return;
  }
  size_t sFieldCount = klass->NumStaticFields();
  if (sFieldCount != 0) {
    int byteLength = sFieldCount * sizeof(JValue);  // TODO bogus; fields are packed
    // Create a byte array to reflect the allocation of the
    // StaticField array at the end of this class.
    __ AddU1(HPROF_PRIMITIVE_ARRAY_DUMP);
    __ AddClassStaticsId(klass);
    __ AddU4(StackTraceSerialNumber(klass));
    __ AddU4(byteLength);
    __ AddU1(hprof_basic_byte);
    for (int i = 0; i < byteLength; ++i) {
      __ AddU1(0);
    }
  }

  __ AddU1(HPROF_CLASS_DUMP);
  __ AddClassId(LookupClassId(klass));
  __ AddU4(StackTraceSerialNumber(klass));
  __ AddClassId(LookupClassId(klass->GetSuperClass()));
  __ AddObjectId(klass->GetClassLoader());
  __ AddObjectId(nullptr);  // no signer
  __ AddObjectId(nullptr);  // no prot domain
  __ AddObjectId(nullptr);  // reserved
  __ AddObjectId(nullptr);  // reserved
  if (klass->IsClassClass()) {
    // ClassObjects have their static fields appended, so aren't all the same size.
    // But they're at least this size.
    __ AddU4(sizeof(mirror::Class));  // instance size
  } else if (klass->IsStringClass()) {
    // Strings are variable length with character data at the end like arrays.
    // This outputs the size of an empty string.
    __ AddU4(sizeof(mirror::String));
  } else if (klass->IsArrayClass() || klass->IsPrimitive()) {
    __ AddU4(0);
  } else {
    __ AddU4(klass->GetObjectSize());  // instance size
  }

  __ AddU2(0);  // empty const pool

  // Static fields
  if (sFieldCount == 0) {
    __ AddU2((uint16_t)0);
  } else {
    __ AddU2((uint16_t)(sFieldCount+1));
    __ AddStringId(LookupStringId(kStaticOverheadName));
    __ AddU1(hprof_basic_object);
    __ AddClassStaticsId(klass);

    for (size_t i = 0; i < sFieldCount; ++i) {
      ArtField* f = klass->GetStaticField(i);

      size_t size;
      HprofBasicType t = SignatureToBasicTypeAndSize(f->GetTypeDescriptor(), &size);
      __ AddStringId(LookupStringId(f->GetName()));
      __ AddU1(t);
      switch (t) {
        case hprof_basic_byte:
          __ AddU1(f->GetByte(klass));
          break;
        case hprof_basic_boolean:
          __ AddU1(f->GetBoolean(klass));
          break;
        case hprof_basic_char:
          __ AddU2(f->GetChar(klass));
          break;
        case hprof_basic_short:
          __ AddU2(f->GetShort(klass));
          break;
        case hprof_basic_float:
        case hprof_basic_int:
        case hprof_basic_object:
          __ AddU4(f->Get32(klass));
          break;
        case hprof_basic_double:
        case hprof_basic_long:
          __ AddU8(f->Get64(klass));
          break;
        default:
          LOG(FATAL) << "Unexpected size " << size;
          UNREACHABLE();
      }
    }
  }

  // Instance fields for this class (no superclass fields)
  int iFieldCount = klass->NumInstanceFields();
  if (klass->IsStringClass()) {
    __ AddU2((uint16_t)iFieldCount + 1);
  } else {
    __ AddU2((uint16_t)iFieldCount);
  }
  for (int i = 0; i < iFieldCount; ++i) {
    ArtField* f = klass->GetInstanceField(i);
    __ AddStringId(LookupStringId(f->GetName()));
    HprofBasicType t = SignatureToBasicTypeAndSize(f->GetTypeDescriptor(), nullptr);
    __ AddU1(t);
  }
  // Add native value character array for strings.
  if (klass->IsStringClass()) {
    __ AddStringId(LookupStringId("value"));
    __ AddU1(hprof_basic_object);
  }
}

void Hprof::DumpHeapArray(mirror::Array* obj, mirror::Class* klass) {
  uint32_t length = obj->GetLength();

  if (obj->IsObjectArray()) {
    // obj is an object array.
    __ AddU1(HPROF_OBJECT_ARRAY_DUMP);

    __ AddObjectId(obj);
    __ AddU4(StackTraceSerialNumber(obj));
    __ AddU4(length);
    __ AddClassId(LookupClassId(klass));

    // Dump the elements, which are always objects or null.
    __ AddIdList(obj->AsObjectArray<mirror::Object>());
  } else {
    size_t size;
    HprofBasicType t = SignatureToBasicTypeAndSize(
        Primitive::Descriptor(klass->GetComponentType()->GetPrimitiveType()), &size);

    // obj is a primitive array.
    __ AddU1(HPROF_PRIMITIVE_ARRAY_DUMP);

    __ AddObjectId(obj);
    __ AddU4(StackTraceSerialNumber(obj));
    __ AddU4(length);
    __ AddU1(t);

    // Dump the raw, packed element values.
    if (size == 1) {
      __ AddU1List(reinterpret_cast<const uint8_t*>(obj->GetRawData(sizeof(uint8_t), 0)), length);
    } else if (size == 2) {
      __ AddU2List(reinterpret_cast<const uint16_t*>(obj->GetRawData(sizeof(uint16_t), 0)), length);
    } else if (size == 4) {
      __ AddU4List(reinterpret_cast<const uint32_t*>(obj->GetRawData(sizeof(uint32_t), 0)), length);
    } else if (size == 8) {
      __ AddU8List(reinterpret_cast<const uint64_t*>(obj->GetRawData(sizeof(uint64_t), 0)), length);
    }
  }
}

void Hprof::DumpHeapInstanceObject(mirror::Object* obj, mirror::Class* klass) {
  // obj is an instance object.
  __ AddU1(HPROF_INSTANCE_DUMP);
  __ AddObjectId(obj);
  __ AddU4(StackTraceSerialNumber(obj));
  __ AddClassId(LookupClassId(klass));

  // Reserve some space for the length of the instance data, which we won't
  // know until we're done writing it.
  size_t size_patch_offset = output_->Length();
  __ AddU4(0x77777777);

  // What we will use for the string value if the object is a string.
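  // ART stores a String's character data inline in the String object rather than in a separate
  // char[] instance, so the dump synthesizes one: DumpHeapClass declared a fake "value" field,
  // and a matching primitive char array is emitted at the end of this method under an ID derived
  // from the String itself.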
  mirror::Object* string_value = nullptr;

  // Write the instance data; fields for this class, followed by super class fields, and so on.
  do {
    const size_t instance_fields = klass->NumInstanceFields();
    for (size_t i = 0; i < instance_fields; ++i) {
      ArtField* f = klass->GetInstanceField(i);
      size_t size;
      HprofBasicType t = SignatureToBasicTypeAndSize(f->GetTypeDescriptor(), &size);
      switch (t) {
        case hprof_basic_byte:
          __ AddU1(f->GetByte(obj));
          break;
        case hprof_basic_boolean:
          __ AddU1(f->GetBoolean(obj));
          break;
        case hprof_basic_char:
          __ AddU2(f->GetChar(obj));
          break;
        case hprof_basic_short:
          __ AddU2(f->GetShort(obj));
          break;
        case hprof_basic_float:
        case hprof_basic_int:
        case hprof_basic_object:
          __ AddU4(f->Get32(obj));
          break;
        case hprof_basic_double:
        case hprof_basic_long:
          __ AddU8(f->Get64(obj));
          break;
      }
    }
    // Add value field for String if necessary.
    if (klass->IsStringClass()) {
      mirror::String* s = obj->AsString();
      if (s->GetLength() == 0) {
        // If string is empty, use an object-aligned address within the string for the value.
        string_value = reinterpret_cast<mirror::Object*>(
            reinterpret_cast<uintptr_t>(s) + kObjectAlignment);
      } else {
        string_value = reinterpret_cast<mirror::Object*>(s->GetValue());
      }
      __ AddObjectId(string_value);
    }

    klass = klass->GetSuperClass();
  } while (klass != nullptr);

  // Patch the instance field length.
  __ UpdateU4(size_patch_offset, output_->Length() - (size_patch_offset + 4));

  // Output native value character array for strings.
  CHECK_EQ(obj->IsString(), string_value != nullptr);
  if (string_value != nullptr) {
    mirror::String* s = obj->AsString();
    __ AddU1(HPROF_PRIMITIVE_ARRAY_DUMP);
    __ AddObjectId(string_value);
    __ AddU4(StackTraceSerialNumber(obj));
    __ AddU4(s->GetLength());
    __ AddU1(hprof_basic_char);
    __ AddU2List(s->GetValue(), s->GetLength());
  }
}

void Hprof::VisitRoot(mirror::Object* obj, const RootInfo& info) {
  static const HprofHeapTag xlate[] = {
    HPROF_ROOT_UNKNOWN,
    HPROF_ROOT_JNI_GLOBAL,
    HPROF_ROOT_JNI_LOCAL,
    HPROF_ROOT_JAVA_FRAME,
    HPROF_ROOT_NATIVE_STACK,
    HPROF_ROOT_STICKY_CLASS,
    HPROF_ROOT_THREAD_BLOCK,
    HPROF_ROOT_MONITOR_USED,
    HPROF_ROOT_THREAD_OBJECT,
    HPROF_ROOT_INTERNED_STRING,
    HPROF_ROOT_FINALIZING,
    HPROF_ROOT_DEBUGGER,
    HPROF_ROOT_REFERENCE_CLEANUP,
    HPROF_ROOT_VM_INTERNAL,
    HPROF_ROOT_JNI_MONITOR,
  };
  CHECK_LT(info.GetType(), sizeof(xlate) / sizeof(HprofHeapTag));
  if (obj == nullptr) {
    return;
  }
  MarkRootObject(obj, 0, xlate[info.GetType()], info.GetThreadId());
}

// If "direct_to_ddms" is true, the other arguments are ignored, and data is
// sent directly to DDMS.
// If "fd" is >= 0, the output will be written to that file descriptor.
// Otherwise, "filename" is used to create an output file.
void DumpHeap(const char* filename, int fd, bool direct_to_ddms) {
  CHECK(filename != nullptr);

  Thread* self = Thread::Current();
  gc::Heap* heap = Runtime::Current()->GetHeap();
  if (heap->IsGcConcurrentAndMoving()) {
    // Need to take a heap dump while GC isn't running. See the
    // comment in Heap::VisitObjects().
    heap->IncrementDisableMovingGC(self);
  }
  Runtime::Current()->GetThreadList()->SuspendAll(__FUNCTION__, true /* long suspend */);
  Hprof hprof(filename, fd, direct_to_ddms);
  hprof.Dump();
  Runtime::Current()->GetThreadList()->ResumeAll();
  if (heap->IsGcConcurrentAndMoving()) {
    heap->DecrementDisableMovingGC(self);
  }
}

}  // namespace hprof
}  // namespace art