1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/profiler/heap-snapshot-generator.h"
6
7 #include "src/code-stubs.h"
8 #include "src/conversions.h"
9 #include "src/debug/debug.h"
10 #include "src/objects-body-descriptors.h"
11 #include "src/profiler/allocation-tracker.h"
12 #include "src/profiler/heap-profiler.h"
13 #include "src/profiler/heap-snapshot-generator-inl.h"
14 #include "src/types.h"
15
16 namespace v8 {
17 namespace internal {
18
19
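// A heap graph edge packs its type and the index of its source entry into
// bit_field_. The target entry is kept as an index into the snapshot's entry
// list until HeapSnapshot::FillChildren() swaps it for a direct pointer via
// ReplaceToIndexWithEntry().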
20 HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to)
21 : bit_field_(TypeField::encode(type) | FromIndexField::encode(from)),
22 to_index_(to),
23 name_(name) {
24 DCHECK(type == kContextVariable
25 || type == kProperty
26 || type == kInternal
27 || type == kShortcut
28 || type == kWeak);
29 }
30
31
32 HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to)
33 : bit_field_(TypeField::encode(type) | FromIndexField::encode(from)),
34 to_index_(to),
35 index_(index) {
36 DCHECK(type == kElement || type == kHidden);
37 }
38
39
40 void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) {
41 to_entry_ = &snapshot->entries()[to_index_];
42 }
43
44
45 const int HeapEntry::kNoEntry = -1;
46
47 HeapEntry::HeapEntry(HeapSnapshot* snapshot,
48 Type type,
49 const char* name,
50 SnapshotObjectId id,
51 size_t self_size,
52 unsigned trace_node_id)
53 : type_(type),
54 children_count_(0),
55 children_index_(-1),
56 self_size_(self_size),
57 snapshot_(snapshot),
58 name_(name),
59 id_(id),
60 trace_node_id_(trace_node_id) { }
61
62
63 void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
64 const char* name,
65 HeapEntry* entry) {
66 HeapGraphEdge edge(type, name, this->index(), entry->index());
67 snapshot_->edges().Add(edge);
68 ++children_count_;
69 }
70
71
72 void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
73 int index,
74 HeapEntry* entry) {
75 HeapGraphEdge edge(type, index, this->index(), entry->index());
76 snapshot_->edges().Add(edge);
77 ++children_count_;
78 }
79
80
81 void HeapEntry::Print(
82 const char* prefix, const char* edge_name, int max_depth, int indent) {
83 STATIC_ASSERT(sizeof(unsigned) == sizeof(id()));
84 base::OS::Print("%6" V8PRIuPTR " @%6u %*c %s%s: ", self_size(), id(), indent,
85 ' ', prefix, edge_name);
86 if (type() != kString) {
87 base::OS::Print("%s %.40s\n", TypeAsString(), name_);
88 } else {
89 base::OS::Print("\"");
90 const char* c = name_;
91 while (*c && (c - name_) <= 40) {
92 if (*c != '\n')
93 base::OS::Print("%c", *c);
94 else
95 base::OS::Print("\\n");
96 ++c;
97 }
98 base::OS::Print("\"\n");
99 }
100 if (--max_depth == 0) return;
101 Vector<HeapGraphEdge*> ch = children();
102 for (int i = 0; i < ch.length(); ++i) {
103 HeapGraphEdge& edge = *ch[i];
104 const char* edge_prefix = "";
105 EmbeddedVector<char, 64> index;
106 const char* edge_name = index.start();
107 switch (edge.type()) {
108 case HeapGraphEdge::kContextVariable:
109 edge_prefix = "#";
110 edge_name = edge.name();
111 break;
112 case HeapGraphEdge::kElement:
113 SNPrintF(index, "%d", edge.index());
114 break;
115 case HeapGraphEdge::kInternal:
116 edge_prefix = "$";
117 edge_name = edge.name();
118 break;
119 case HeapGraphEdge::kProperty:
120 edge_name = edge.name();
121 break;
122 case HeapGraphEdge::kHidden:
123 edge_prefix = "$";
124 SNPrintF(index, "%d", edge.index());
125 break;
126 case HeapGraphEdge::kShortcut:
127 edge_prefix = "^";
128 edge_name = edge.name();
129 break;
130 case HeapGraphEdge::kWeak:
131 edge_prefix = "w";
132 edge_name = edge.name();
133 break;
134 default:
135 SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
136 }
137 edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
138 }
139 }
140
141
142 const char* HeapEntry::TypeAsString() {
143 switch (type()) {
144 case kHidden: return "/hidden/";
145 case kObject: return "/object/";
146 case kClosure: return "/closure/";
147 case kString: return "/string/";
148 case kCode: return "/code/";
149 case kArray: return "/array/";
150 case kRegExp: return "/regexp/";
151 case kHeapNumber: return "/number/";
152 case kNative: return "/native/";
153 case kSynthetic: return "/synthetic/";
154 case kConsString: return "/concatenated string/";
155 case kSlicedString: return "/sliced string/";
156 case kSymbol: return "/symbol/";
157 case kSimdValue: return "/simd/";
158 default: return "???";
159 }
160 }
161
162
163 // It is very important to keep objects that form a heap snapshot
164 // as small as possible.
165 namespace { // Avoid littering the global namespace.
166
167 template <size_t ptr_size> struct SnapshotSizeConstants;
168
169 template <> struct SnapshotSizeConstants<4> {
170 static const int kExpectedHeapGraphEdgeSize = 12;
171 static const int kExpectedHeapEntrySize = 28;
172 };
173
174 template <> struct SnapshotSizeConstants<8> {
175 static const int kExpectedHeapGraphEdgeSize = 24;
176 static const int kExpectedHeapEntrySize = 40;
177 };
178
179 } // namespace
180
181
182 HeapSnapshot::HeapSnapshot(HeapProfiler* profiler)
183 : profiler_(profiler),
184 root_index_(HeapEntry::kNoEntry),
185 gc_roots_index_(HeapEntry::kNoEntry),
186 max_snapshot_js_object_id_(0) {
187 STATIC_ASSERT(
188 sizeof(HeapGraphEdge) ==
189 SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
190 STATIC_ASSERT(
191 sizeof(HeapEntry) ==
192 SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
193 USE(SnapshotSizeConstants<4>::kExpectedHeapGraphEdgeSize);
194 USE(SnapshotSizeConstants<4>::kExpectedHeapEntrySize);
195 USE(SnapshotSizeConstants<8>::kExpectedHeapGraphEdgeSize);
196 USE(SnapshotSizeConstants<8>::kExpectedHeapEntrySize);
197 for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) {
198 gc_subroot_indexes_[i] = HeapEntry::kNoEntry;
199 }
200 }
201
202
203 void HeapSnapshot::Delete() {
204 profiler_->RemoveSnapshot(this);
205 delete this;
206 }
207
208
209 void HeapSnapshot::RememberLastJSObjectId() {
210 max_snapshot_js_object_id_ = profiler_->heap_object_map()->last_assigned_id();
211 }
212
213
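// Adds the synthetic root entry, the "(GC roots)" entry and one subroot entry
// per visitor synchronization tag, assigning them the reserved object ids
// (kInternalRootObjectId, kGcRootsObjectId, kGcRootsFirstSubrootId, ...).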
214 void HeapSnapshot::AddSyntheticRootEntries() {
215 AddRootEntry();
216 AddGcRootsEntry();
217 SnapshotObjectId id = HeapObjectsMap::kGcRootsFirstSubrootId;
218 for (int tag = 0; tag < VisitorSynchronization::kNumberOfSyncTags; tag++) {
219 AddGcSubrootEntry(tag, id);
220 id += HeapObjectsMap::kObjectIdStep;
221 }
222 DCHECK(HeapObjectsMap::kFirstAvailableObjectId == id);
223 }
224
225
226 HeapEntry* HeapSnapshot::AddRootEntry() {
227 DCHECK(root_index_ == HeapEntry::kNoEntry);
228 DCHECK(entries_.is_empty()); // Root entry must be the first one.
229 HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
230 "",
231 HeapObjectsMap::kInternalRootObjectId,
232 0,
233 0);
234 root_index_ = entry->index();
235 DCHECK(root_index_ == 0);
236 return entry;
237 }
238
239
240 HeapEntry* HeapSnapshot::AddGcRootsEntry() {
241 DCHECK(gc_roots_index_ == HeapEntry::kNoEntry);
242 HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
243 "(GC roots)",
244 HeapObjectsMap::kGcRootsObjectId,
245 0,
246 0);
247 gc_roots_index_ = entry->index();
248 return entry;
249 }
250
251
252 HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag, SnapshotObjectId id) {
253 DCHECK(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry);
254 DCHECK(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags);
255 HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
256 VisitorSynchronization::kTagNames[tag], id, 0, 0);
257 gc_subroot_indexes_[tag] = entry->index();
258 return entry;
259 }
260
261
262 HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
263 const char* name,
264 SnapshotObjectId id,
265 size_t size,
266 unsigned trace_node_id) {
267 HeapEntry entry(this, type, name, id, size, trace_node_id);
268 entries_.Add(entry);
269 return &entries_.last();
270 }
271
272
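// Distributes all accumulated edges to their source entries: each entry first
// reserves a slice of the shared children vector, then every edge's target
// index is resolved to a HeapEntry pointer and the edge is attached to the
// child list of its source entry.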
273 void HeapSnapshot::FillChildren() {
274 DCHECK(children().is_empty());
275 children().Allocate(edges().length());
276 int children_index = 0;
277 for (int i = 0; i < entries().length(); ++i) {
278 HeapEntry* entry = &entries()[i];
279 children_index = entry->set_children_index(children_index);
280 }
281 DCHECK(edges().length() == children_index);
282 for (int i = 0; i < edges().length(); ++i) {
283 HeapGraphEdge* edge = &edges()[i];
284 edge->ReplaceToIndexWithEntry(this);
285 edge->from()->add_child(edge);
286 }
287 }
288
289
290 class FindEntryById {
291 public:
292 explicit FindEntryById(SnapshotObjectId id) : id_(id) { }
293 int operator()(HeapEntry* const* entry) {
294 if ((*entry)->id() == id_) return 0;
295 return (*entry)->id() < id_ ? -1 : 1;
296 }
297 private:
298 SnapshotObjectId id_;
299 };
300
301
302 HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
303 List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
304 // Perform a binary search by id.
305 int index = SortedListBSearch(*entries_by_id, FindEntryById(id));
306 if (index == -1)
307 return NULL;
308 return entries_by_id->at(index);
309 }
310
311
312 template<class T>
313 static int SortByIds(const T* entry1_ptr,
314 const T* entry2_ptr) {
315 if ((*entry1_ptr)->id() == (*entry2_ptr)->id()) return 0;
316 return (*entry1_ptr)->id() < (*entry2_ptr)->id() ? -1 : 1;
317 }
318
319
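// Lazily builds a list of all entries sorted by id, used by GetEntryById()
// for its binary search.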
320 List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() {
321 if (sorted_entries_.is_empty()) {
322 sorted_entries_.Allocate(entries_.length());
323 for (int i = 0; i < entries_.length(); ++i) {
324 sorted_entries_[i] = &entries_[i];
325 }
326 sorted_entries_.Sort<int (*)(HeapEntry* const*, HeapEntry* const*)>(
327 SortByIds);
328 }
329 return &sorted_entries_;
330 }
331
332
333 void HeapSnapshot::Print(int max_depth) {
334 root()->Print("", "", max_depth, 0);
335 }
336
337
338 size_t HeapSnapshot::RawSnapshotSize() const {
339 return
340 sizeof(*this) +
341 GetMemoryUsedByList(entries_) +
342 GetMemoryUsedByList(edges_) +
343 GetMemoryUsedByList(children_) +
344 GetMemoryUsedByList(sorted_entries_);
345 }
346
347
348 // We split IDs on evens for embedder (native) objects (see
349 // HeapObjectsMap::GenerateId) and odds for V8 heap objects.
350 const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
351 const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId =
352 HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep;
353 const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId =
354 HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep;
355 const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
356 HeapObjectsMap::kGcRootsFirstSubrootId +
357 VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;
358
359
360 static bool AddressesMatch(void* key1, void* key2) {
361 return key1 == key2;
362 }
363
364
365 HeapObjectsMap::HeapObjectsMap(Heap* heap)
366 : next_id_(kFirstAvailableObjectId),
367 entries_map_(AddressesMatch),
368 heap_(heap) {
369 // This dummy element solves a problem with entries_map_.
370 // When we do a lookup in the HashMap we see no difference between two cases:
371 // it has an entry with NULL as the value, or it has created
372 // a new entry on the fly with NULL as the default value.
373 // With such a dummy element we have a guarantee that all entries_map_ entries
374 // will have the value field greater than 0.
375 // This fact is used in the MoveObject method.
376 entries_.Add(EntryInfo(0, NULL, 0));
377 }
378
379
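// Called when the GC moves an object. Rebinds the EntryInfo registered for
// the 'from' address to the 'to' address and refreshes its recorded size.
// Returns whether an existing tracked entry was rebound.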
380 bool HeapObjectsMap::MoveObject(Address from, Address to, int object_size) {
381 DCHECK(to != NULL);
382 DCHECK(from != NULL);
383 if (from == to) return false;
384 void* from_value = entries_map_.Remove(from, ComputePointerHash(from));
385 if (from_value == NULL) {
386 // It may occur that some untracked object moves to an address X and there
387 // is a tracked object at that address. In this case we should remove the
388 // entry as we know that the object has died.
389 void* to_value = entries_map_.Remove(to, ComputePointerHash(to));
390 if (to_value != NULL) {
391 int to_entry_info_index =
392 static_cast<int>(reinterpret_cast<intptr_t>(to_value));
393 entries_.at(to_entry_info_index).addr = NULL;
394 }
395 } else {
396 HashMap::Entry* to_entry =
397 entries_map_.LookupOrInsert(to, ComputePointerHash(to));
398 if (to_entry->value != NULL) {
399 // We found an existing entry with the 'to' address for an old object.
400 // Without this operation we would have two EntryInfo's with the same
401 // value in the addr field. That is bad because later, at RemoveDeadEntries,
402 // one of these entries would be removed together with the corresponding
403 // entries_map_ entry.
404 int to_entry_info_index =
405 static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
406 entries_.at(to_entry_info_index).addr = NULL;
407 }
408 int from_entry_info_index =
409 static_cast<int>(reinterpret_cast<intptr_t>(from_value));
410 entries_.at(from_entry_info_index).addr = to;
411 // The size of an object can change during its lifetime, so to keep the
412 // information about the object in entries_ consistent, we have to adjust its
413 // size when the object is migrated.
414 if (FLAG_heap_profiler_trace_objects) {
415 PrintF("Move object from %p to %p old size %6d new size %6d\n",
416 from,
417 to,
418 entries_.at(from_entry_info_index).size,
419 object_size);
420 }
421 entries_.at(from_entry_info_index).size = object_size;
422 to_entry->value = from_value;
423 }
424 return from_value != NULL;
425 }
426
427
428 void HeapObjectsMap::UpdateObjectSize(Address addr, int size) {
429 FindOrAddEntry(addr, size, false);
430 }
431
432
433 SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
434 HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr));
435 if (entry == NULL) return 0;
436 int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
437 EntryInfo& entry_info = entries_.at(entry_index);
438 DCHECK(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
439 return entry_info.id;
440 }
441
442
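// Returns the snapshot object id for the object at 'addr'. If the address is
// not tracked yet, a new EntryInfo with a fresh id (next_id_) is registered;
// in both cases the recorded size is updated to 'size'.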
443 SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
444 unsigned int size,
445 bool accessed) {
446 DCHECK(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
447 HashMap::Entry* entry =
448 entries_map_.LookupOrInsert(addr, ComputePointerHash(addr));
449 if (entry->value != NULL) {
450 int entry_index =
451 static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
452 EntryInfo& entry_info = entries_.at(entry_index);
453 entry_info.accessed = accessed;
454 if (FLAG_heap_profiler_trace_objects) {
455 PrintF("Update object size : %p with old size %d and new size %d\n",
456 addr,
457 entry_info.size,
458 size);
459 }
460 entry_info.size = size;
461 return entry_info.id;
462 }
463 entry->value = reinterpret_cast<void*>(entries_.length());
464 SnapshotObjectId id = next_id_;
465 next_id_ += kObjectIdStep;
466 entries_.Add(EntryInfo(id, addr, size, accessed));
467 DCHECK(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
468 return id;
469 }
470
471
472 void HeapObjectsMap::StopHeapObjectsTracking() {
473 time_intervals_.Clear();
474 }
475
476
477 void HeapObjectsMap::UpdateHeapObjectsMap() {
478 if (FLAG_heap_profiler_trace_objects) {
479 PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
480 entries_map_.occupancy());
481 }
482 heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
483 "HeapObjectsMap::UpdateHeapObjectsMap");
484 HeapIterator iterator(heap_);
485 for (HeapObject* obj = iterator.next();
486 obj != NULL;
487 obj = iterator.next()) {
488 FindOrAddEntry(obj->address(), obj->Size());
489 if (FLAG_heap_profiler_trace_objects) {
490 PrintF("Update object : %p %6d. Next address is %p\n",
491 obj->address(),
492 obj->Size(),
493 obj->address() + obj->Size());
494 }
495 }
496 RemoveDeadEntries();
497 if (FLAG_heap_profiler_trace_objects) {
498 PrintF("End HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
499 entries_map_.occupancy());
500 }
501 }
502
503
504 namespace {
505
506
507 struct HeapObjectInfo {
508 HeapObjectInfo(HeapObject* obj, int expected_size)
509 : obj(obj),
510 expected_size(expected_size) {
511 }
512
513 HeapObject* obj;
514 int expected_size;
515
516 bool IsValid() const { return expected_size == obj->Size(); }
517
518 void Print() const {
519 if (expected_size == 0) {
520 PrintF("Untracked object : %p %6d. Next address is %p\n",
521 obj->address(),
522 obj->Size(),
523 obj->address() + obj->Size());
524 } else if (obj->Size() != expected_size) {
525 PrintF("Wrong size %6d: %p %6d. Next address is %p\n",
526 expected_size,
527 obj->address(),
528 obj->Size(),
529 obj->address() + obj->Size());
530 } else {
531 PrintF("Good object : %p %6d. Next address is %p\n",
532 obj->address(),
533 expected_size,
534 obj->address() + obj->Size());
535 }
536 }
537 };
538
539
540 static int comparator(const HeapObjectInfo* a, const HeapObjectInfo* b) {
541 if (a->obj < b->obj) return -1;
542 if (a->obj > b->obj) return 1;
543 return 0;
544 }
545
546
547 } // namespace
548
549
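// Verification helper: walks the entire heap and counts objects that are
// missing from entries_map_. With FLAG_heap_profiler_trace_objects it also
// counts objects whose recorded size is stale and prints a mismatch report;
// otherwise a stale size triggers the CHECK below.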
550 int HeapObjectsMap::FindUntrackedObjects() {
551 List<HeapObjectInfo> heap_objects(1000);
552
553 HeapIterator iterator(heap_);
554 int untracked = 0;
555 for (HeapObject* obj = iterator.next();
556 obj != NULL;
557 obj = iterator.next()) {
558 HashMap::Entry* entry =
559 entries_map_.Lookup(obj->address(), ComputePointerHash(obj->address()));
560 if (entry == NULL) {
561 ++untracked;
562 if (FLAG_heap_profiler_trace_objects) {
563 heap_objects.Add(HeapObjectInfo(obj, 0));
564 }
565 } else {
566 int entry_index = static_cast<int>(
567 reinterpret_cast<intptr_t>(entry->value));
568 EntryInfo& entry_info = entries_.at(entry_index);
569 if (FLAG_heap_profiler_trace_objects) {
570 heap_objects.Add(HeapObjectInfo(obj,
571 static_cast<int>(entry_info.size)));
572 if (obj->Size() != static_cast<int>(entry_info.size))
573 ++untracked;
574 } else {
575 CHECK_EQ(obj->Size(), static_cast<int>(entry_info.size));
576 }
577 }
578 }
579 if (FLAG_heap_profiler_trace_objects) {
580 PrintF("\nBegin HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n",
581 entries_map_.occupancy());
582 heap_objects.Sort(comparator);
583 int last_printed_object = -1;
584 bool print_next_object = false;
585 for (int i = 0; i < heap_objects.length(); ++i) {
586 const HeapObjectInfo& object_info = heap_objects[i];
587 if (!object_info.IsValid()) {
588 ++untracked;
589 if (last_printed_object != i - 1) {
590 if (i > 0) {
591 PrintF("%d objects were skipped\n", i - 1 - last_printed_object);
592 heap_objects[i - 1].Print();
593 }
594 }
595 object_info.Print();
596 last_printed_object = i;
597 print_next_object = true;
598 } else if (print_next_object) {
599 object_info.Print();
600 print_next_object = false;
601 last_printed_object = i;
602 }
603 }
604 if (last_printed_object < heap_objects.length() - 1) {
605 PrintF("Last %d objects were skipped\n",
606 heap_objects.length() - 1 - last_printed_object);
607 }
608 PrintF("End HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n\n",
609 entries_map_.occupancy());
610 }
611 return untracked;
612 }
613
614
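// Streams incremental heap statistics to 'stream': after refreshing the
// object map it partitions entries_ by the recorded time intervals and emits
// a v8::HeapStatsUpdate for every interval whose object count or total size
// changed since the previous push. Returns the last assigned object id.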
615 SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream,
616 int64_t* timestamp_us) {
617 UpdateHeapObjectsMap();
618 time_intervals_.Add(TimeInterval(next_id_));
619 int prefered_chunk_size = stream->GetChunkSize();
620 List<v8::HeapStatsUpdate> stats_buffer;
621 DCHECK(!entries_.is_empty());
622 EntryInfo* entry_info = &entries_.first();
623 EntryInfo* end_entry_info = &entries_.last() + 1;
624 for (int time_interval_index = 0;
625 time_interval_index < time_intervals_.length();
626 ++time_interval_index) {
627 TimeInterval& time_interval = time_intervals_[time_interval_index];
628 SnapshotObjectId time_interval_id = time_interval.id;
629 uint32_t entries_size = 0;
630 EntryInfo* start_entry_info = entry_info;
631 while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
632 entries_size += entry_info->size;
633 ++entry_info;
634 }
635 uint32_t entries_count =
636 static_cast<uint32_t>(entry_info - start_entry_info);
637 if (time_interval.count != entries_count ||
638 time_interval.size != entries_size) {
639 stats_buffer.Add(v8::HeapStatsUpdate(
640 time_interval_index,
641 time_interval.count = entries_count,
642 time_interval.size = entries_size));
643 if (stats_buffer.length() >= prefered_chunk_size) {
644 OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
645 &stats_buffer.first(), stats_buffer.length());
646 if (result == OutputStream::kAbort) return last_assigned_id();
647 stats_buffer.Clear();
648 }
649 }
650 }
651 DCHECK(entry_info == end_entry_info);
652 if (!stats_buffer.is_empty()) {
653 OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
654 &stats_buffer.first(), stats_buffer.length());
655 if (result == OutputStream::kAbort) return last_assigned_id();
656 }
657 stream->EndOfStream();
658 if (timestamp_us) {
659 *timestamp_us = (time_intervals_.last().timestamp -
660 time_intervals_[0].timestamp).InMicroseconds();
661 }
662 return last_assigned_id();
663 }
664
665
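// Compacts entries_ after a heap walk: entries whose objects were seen
// (accessed == true) are kept and their entries_map_ slots are re-pointed to
// the new indices; entries of dead objects are dropped from both entries_
// and entries_map_.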
666 void HeapObjectsMap::RemoveDeadEntries() {
667 DCHECK(entries_.length() > 0 &&
668 entries_.at(0).id == 0 &&
669 entries_.at(0).addr == NULL);
670 int first_free_entry = 1;
671 for (int i = 1; i < entries_.length(); ++i) {
672 EntryInfo& entry_info = entries_.at(i);
673 if (entry_info.accessed) {
674 if (first_free_entry != i) {
675 entries_.at(first_free_entry) = entry_info;
676 }
677 entries_.at(first_free_entry).accessed = false;
678 HashMap::Entry* entry = entries_map_.Lookup(
679 entry_info.addr, ComputePointerHash(entry_info.addr));
680 DCHECK(entry);
681 entry->value = reinterpret_cast<void*>(first_free_entry);
682 ++first_free_entry;
683 } else {
684 if (entry_info.addr) {
685 entries_map_.Remove(entry_info.addr,
686 ComputePointerHash(entry_info.addr));
687 }
688 }
689 }
690 entries_.Rewind(first_free_entry);
691 DCHECK(static_cast<uint32_t>(entries_.length()) - 1 ==
692 entries_map_.occupancy());
693 }
694
695
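// Computes an id for an embedder-provided (native) object by hashing its
// label, hash and element count. The result is shifted left by one so these
// ids stay even and never collide with the odd ids handed out for V8 heap
// objects via next_id_.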
696 SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
697 SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
698 const char* label = info->GetLabel();
699 id ^= StringHasher::HashSequentialString(label,
700 static_cast<int>(strlen(label)),
701 heap_->HashSeed());
702 intptr_t element_count = info->GetElementCount();
703 if (element_count != -1)
704 id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count),
705 v8::internal::kZeroHashSeed);
706 return id << 1;
707 }
708
709
710 size_t HeapObjectsMap::GetUsedMemorySize() const {
711 return
712 sizeof(*this) +
713 sizeof(HashMap::Entry) * entries_map_.capacity() +
714 GetMemoryUsedByList(entries_) +
715 GetMemoryUsedByList(time_intervals_);
716 }
717
718
719 HeapEntriesMap::HeapEntriesMap()
720 : entries_(HashMap::PointersMatch) {
721 }
722
723
724 int HeapEntriesMap::Map(HeapThing thing) {
725 HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing));
726 if (cache_entry == NULL) return HeapEntry::kNoEntry;
727 return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
728 }
729
730
731 void HeapEntriesMap::Pair(HeapThing thing, int entry) {
732 HashMap::Entry* cache_entry = entries_.LookupOrInsert(thing, Hash(thing));
733 DCHECK(cache_entry->value == NULL);
734 cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry));
735 }
736
737
738 HeapObjectsSet::HeapObjectsSet()
739 : entries_(HashMap::PointersMatch) {
740 }
741
742
743 void HeapObjectsSet::Clear() {
744 entries_.Clear();
745 }
746
747
748 bool HeapObjectsSet::Contains(Object* obj) {
749 if (!obj->IsHeapObject()) return false;
750 HeapObject* object = HeapObject::cast(obj);
751 return entries_.Lookup(object, HeapEntriesMap::Hash(object)) != NULL;
752 }
753
754
755 void HeapObjectsSet::Insert(Object* obj) {
756 if (!obj->IsHeapObject()) return;
757 HeapObject* object = HeapObject::cast(obj);
758 entries_.LookupOrInsert(object, HeapEntriesMap::Hash(object));
759 }
760
761
762 const char* HeapObjectsSet::GetTag(Object* obj) {
763 HeapObject* object = HeapObject::cast(obj);
764 HashMap::Entry* cache_entry =
765 entries_.Lookup(object, HeapEntriesMap::Hash(object));
766 return cache_entry != NULL
767 ? reinterpret_cast<const char*>(cache_entry->value)
768 : NULL;
769 }
770
771
772 void HeapObjectsSet::SetTag(Object* obj, const char* tag) {
773 if (!obj->IsHeapObject()) return;
774 HeapObject* object = HeapObject::cast(obj);
775 HashMap::Entry* cache_entry =
776 entries_.LookupOrInsert(object, HeapEntriesMap::Hash(object));
777 cache_entry->value = const_cast<char*>(tag);
778 }
779
780
781 V8HeapExplorer::V8HeapExplorer(
782 HeapSnapshot* snapshot,
783 SnapshottingProgressReportingInterface* progress,
784 v8::HeapProfiler::ObjectNameResolver* resolver)
785 : heap_(snapshot->profiler()->heap_object_map()->heap()),
786 snapshot_(snapshot),
787 names_(snapshot_->profiler()->names()),
788 heap_object_map_(snapshot_->profiler()->heap_object_map()),
789 progress_(progress),
790 filler_(NULL),
791 global_object_name_resolver_(resolver) {
792 }
793
794
795 V8HeapExplorer::~V8HeapExplorer() {
796 }
797
798
799 HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
800 return AddEntry(reinterpret_cast<HeapObject*>(ptr));
801 }
802
803
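// Classifies a heap object into a HeapEntry type and chooses a human-readable
// name for it (function name, constructor name, string contents, etc.).
// Anything not recognized falls back to a hidden "system" entry.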
804 HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) {
805 if (object->IsJSFunction()) {
806 JSFunction* func = JSFunction::cast(object);
807 SharedFunctionInfo* shared = func->shared();
808 const char* name = names_->GetName(String::cast(shared->name()));
809 return AddEntry(object, HeapEntry::kClosure, name);
810 } else if (object->IsJSBoundFunction()) {
811 return AddEntry(object, HeapEntry::kClosure, "native_bind");
812 } else if (object->IsJSRegExp()) {
813 JSRegExp* re = JSRegExp::cast(object);
814 return AddEntry(object,
815 HeapEntry::kRegExp,
816 names_->GetName(re->Pattern()));
817 } else if (object->IsJSObject()) {
818 const char* name = names_->GetName(
819 GetConstructorName(JSObject::cast(object)));
820 if (object->IsJSGlobalObject()) {
821 const char* tag = objects_tags_.GetTag(object);
822 if (tag != NULL) {
823 name = names_->GetFormatted("%s / %s", name, tag);
824 }
825 }
826 return AddEntry(object, HeapEntry::kObject, name);
827 } else if (object->IsString()) {
828 String* string = String::cast(object);
829 if (string->IsConsString())
830 return AddEntry(object,
831 HeapEntry::kConsString,
832 "(concatenated string)");
833 if (string->IsSlicedString())
834 return AddEntry(object,
835 HeapEntry::kSlicedString,
836 "(sliced string)");
837 return AddEntry(object,
838 HeapEntry::kString,
839 names_->GetName(String::cast(object)));
840 } else if (object->IsSymbol()) {
841 if (Symbol::cast(object)->is_private())
842 return AddEntry(object, HeapEntry::kHidden, "private symbol");
843 else
844 return AddEntry(object, HeapEntry::kSymbol, "symbol");
845 } else if (object->IsCode()) {
846 return AddEntry(object, HeapEntry::kCode, "");
847 } else if (object->IsSharedFunctionInfo()) {
848 String* name = String::cast(SharedFunctionInfo::cast(object)->name());
849 return AddEntry(object,
850 HeapEntry::kCode,
851 names_->GetName(name));
852 } else if (object->IsScript()) {
853 Object* name = Script::cast(object)->name();
854 return AddEntry(object,
855 HeapEntry::kCode,
856 name->IsString()
857 ? names_->GetName(String::cast(name))
858 : "");
859 } else if (object->IsNativeContext()) {
860 return AddEntry(object, HeapEntry::kHidden, "system / NativeContext");
861 } else if (object->IsContext()) {
862 return AddEntry(object, HeapEntry::kObject, "system / Context");
863 } else if (object->IsFixedArray() || object->IsFixedDoubleArray() ||
864 object->IsByteArray()) {
865 return AddEntry(object, HeapEntry::kArray, "");
866 } else if (object->IsHeapNumber()) {
867 return AddEntry(object, HeapEntry::kHeapNumber, "number");
868 } else if (object->IsSimd128Value()) {
869 return AddEntry(object, HeapEntry::kSimdValue, "simd");
870 }
871 return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
872 }
873
874
875 HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
876 HeapEntry::Type type,
877 const char* name) {
878 return AddEntry(object->address(), type, name, object->Size());
879 }
880
881
882 HeapEntry* V8HeapExplorer::AddEntry(Address address,
883 HeapEntry::Type type,
884 const char* name,
885 size_t size) {
886 SnapshotObjectId object_id = heap_object_map_->FindOrAddEntry(
887 address, static_cast<unsigned int>(size));
888 unsigned trace_node_id = 0;
889 if (AllocationTracker* allocation_tracker =
890 snapshot_->profiler()->allocation_tracker()) {
891 trace_node_id =
892 allocation_tracker->address_to_trace()->GetTraceNodeId(address);
893 }
894 return snapshot_->AddEntry(type, name, object_id, size, trace_node_id);
895 }
896
897
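// Glue between the heap explorers and the snapshot being built: creates
// entries on demand through a HeapEntriesAllocator, records the
// HeapThing -> entry index mapping in a HeapEntriesMap, and forwards
// reference-setting calls to the corresponding parent HeapEntry.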
898 class SnapshotFiller {
899 public:
900 explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
901 : snapshot_(snapshot),
902 names_(snapshot->profiler()->names()),
903 entries_(entries) { }
904 HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
905 HeapEntry* entry = allocator->AllocateEntry(ptr);
906 entries_->Pair(ptr, entry->index());
907 return entry;
908 }
909 HeapEntry* FindEntry(HeapThing ptr) {
910 int index = entries_->Map(ptr);
911 return index != HeapEntry::kNoEntry ? &snapshot_->entries()[index] : NULL;
912 }
913 HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
914 HeapEntry* entry = FindEntry(ptr);
915 return entry != NULL ? entry : AddEntry(ptr, allocator);
916 }
917 void SetIndexedReference(HeapGraphEdge::Type type,
918 int parent,
919 int index,
920 HeapEntry* child_entry) {
921 HeapEntry* parent_entry = &snapshot_->entries()[parent];
922 parent_entry->SetIndexedReference(type, index, child_entry);
923 }
924 void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
925 int parent,
926 HeapEntry* child_entry) {
927 HeapEntry* parent_entry = &snapshot_->entries()[parent];
928 int index = parent_entry->children_count() + 1;
929 parent_entry->SetIndexedReference(type, index, child_entry);
930 }
931 void SetNamedReference(HeapGraphEdge::Type type,
932 int parent,
933 const char* reference_name,
934 HeapEntry* child_entry) {
935 HeapEntry* parent_entry = &snapshot_->entries()[parent];
936 parent_entry->SetNamedReference(type, reference_name, child_entry);
937 }
938 void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
939 int parent,
940 HeapEntry* child_entry) {
941 HeapEntry* parent_entry = &snapshot_->entries()[parent];
942 int index = parent_entry->children_count() + 1;
943 parent_entry->SetNamedReference(
944 type,
945 names_->GetName(index),
946 child_entry);
947 }
948
949 private:
950 HeapSnapshot* snapshot_;
951 StringsStorage* names_;
952 HeapEntriesMap* entries_;
953 };
954
955
956 const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) {
957 switch (object->map()->instance_type()) {
958 case MAP_TYPE:
959 switch (Map::cast(object)->instance_type()) {
960 #define MAKE_STRING_MAP_CASE(instance_type, size, name, Name) \
961 case instance_type: return "system / Map (" #Name ")";
962 STRING_TYPE_LIST(MAKE_STRING_MAP_CASE)
963 #undef MAKE_STRING_MAP_CASE
964 default: return "system / Map";
965 }
966 case CELL_TYPE: return "system / Cell";
967 case PROPERTY_CELL_TYPE: return "system / PropertyCell";
968 case FOREIGN_TYPE: return "system / Foreign";
969 case ODDBALL_TYPE: return "system / Oddball";
970 #define MAKE_STRUCT_CASE(NAME, Name, name) \
971 case NAME##_TYPE: return "system / "#Name;
972 STRUCT_LIST(MAKE_STRUCT_CASE)
973 #undef MAKE_STRUCT_CASE
974 default: return "system";
975 }
976 }
977
978
979 int V8HeapExplorer::EstimateObjectsCount(HeapIterator* iterator) {
980 int objects_count = 0;
981 for (HeapObject* obj = iterator->next();
982 obj != NULL;
983 obj = iterator->next()) {
984 objects_count++;
985 }
986 return objects_count;
987 }
988
989
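// Visits every pointer field of an object and reports fields that were not
// already claimed by a named reference (tracked in generator_->marks_) as
// numbered hidden references.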
990 class IndexedReferencesExtractor : public ObjectVisitor {
991 public:
992 IndexedReferencesExtractor(V8HeapExplorer* generator, HeapObject* parent_obj,
993 int parent)
994 : generator_(generator),
995 parent_obj_(parent_obj),
996 parent_start_(HeapObject::RawField(parent_obj_, 0)),
997 parent_end_(HeapObject::RawField(parent_obj_, parent_obj_->Size())),
998 parent_(parent),
999 next_index_(0) {}
1000 void VisitCodeEntry(Address entry_address) override {
1001 Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
1002 generator_->SetInternalReference(parent_obj_, parent_, "code", code);
1003 generator_->TagCodeObject(code);
1004 }
1005 void VisitPointers(Object** start, Object** end) override {
1006 for (Object** p = start; p < end; p++) {
1007 intptr_t index =
1008 static_cast<intptr_t>(p - HeapObject::RawField(parent_obj_, 0));
1009 ++next_index_;
1010 // |p| could be outside of the object, e.g., while visiting RelocInfo of
1011 // code objects.
1012 if (p >= parent_start_ && p < parent_end_ && generator_->marks_[index]) {
1013 generator_->marks_[index] = false;
1014 continue;
1015 }
1016 generator_->SetHiddenReference(parent_obj_, parent_, next_index_, *p);
1017 }
1018 }
1019
1020 private:
1021 V8HeapExplorer* generator_;
1022 HeapObject* parent_obj_;
1023 Object** parent_start_;
1024 Object** parent_end_;
1025 int parent_;
1026 int next_index_;
1027 };
1028
1029
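// First extraction pass: handles every object except FixedArrays. FixedArrays
// are deferred to pass 2 so that containers marked as weak during this pass
// are recognized when their backing arrays are processed.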
1030 bool V8HeapExplorer::ExtractReferencesPass1(int entry, HeapObject* obj) {
1031 if (obj->IsFixedArray()) return false; // FixedArrays are processed on pass 2
1032
1033 if (obj->IsJSGlobalProxy()) {
1034 ExtractJSGlobalProxyReferences(entry, JSGlobalProxy::cast(obj));
1035 } else if (obj->IsJSArrayBuffer()) {
1036 ExtractJSArrayBufferReferences(entry, JSArrayBuffer::cast(obj));
1037 } else if (obj->IsJSObject()) {
1038 if (obj->IsJSWeakSet()) {
1039 ExtractJSWeakCollectionReferences(entry, JSWeakSet::cast(obj));
1040 } else if (obj->IsJSWeakMap()) {
1041 ExtractJSWeakCollectionReferences(entry, JSWeakMap::cast(obj));
1042 } else if (obj->IsJSSet()) {
1043 ExtractJSCollectionReferences(entry, JSSet::cast(obj));
1044 } else if (obj->IsJSMap()) {
1045 ExtractJSCollectionReferences(entry, JSMap::cast(obj));
1046 }
1047 ExtractJSObjectReferences(entry, JSObject::cast(obj));
1048 } else if (obj->IsString()) {
1049 ExtractStringReferences(entry, String::cast(obj));
1050 } else if (obj->IsSymbol()) {
1051 ExtractSymbolReferences(entry, Symbol::cast(obj));
1052 } else if (obj->IsMap()) {
1053 ExtractMapReferences(entry, Map::cast(obj));
1054 } else if (obj->IsSharedFunctionInfo()) {
1055 ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
1056 } else if (obj->IsScript()) {
1057 ExtractScriptReferences(entry, Script::cast(obj));
1058 } else if (obj->IsAccessorInfo()) {
1059 ExtractAccessorInfoReferences(entry, AccessorInfo::cast(obj));
1060 } else if (obj->IsAccessorPair()) {
1061 ExtractAccessorPairReferences(entry, AccessorPair::cast(obj));
1062 } else if (obj->IsCodeCache()) {
1063 ExtractCodeCacheReferences(entry, CodeCache::cast(obj));
1064 } else if (obj->IsCode()) {
1065 ExtractCodeReferences(entry, Code::cast(obj));
1066 } else if (obj->IsBox()) {
1067 ExtractBoxReferences(entry, Box::cast(obj));
1068 } else if (obj->IsCell()) {
1069 ExtractCellReferences(entry, Cell::cast(obj));
1070 } else if (obj->IsPropertyCell()) {
1071 ExtractPropertyCellReferences(entry, PropertyCell::cast(obj));
1072 } else if (obj->IsAllocationSite()) {
1073 ExtractAllocationSiteReferences(entry, AllocationSite::cast(obj));
1074 }
1075 return true;
1076 }
1077
1078
1079 bool V8HeapExplorer::ExtractReferencesPass2(int entry, HeapObject* obj) {
1080 if (!obj->IsFixedArray()) return false;
1081
1082 if (obj->IsContext()) {
1083 ExtractContextReferences(entry, Context::cast(obj));
1084 } else {
1085 ExtractFixedArrayReferences(entry, FixedArray::cast(obj));
1086 }
1087 return true;
1088 }
1089
1090
1091 void V8HeapExplorer::ExtractJSGlobalProxyReferences(
1092 int entry, JSGlobalProxy* proxy) {
1093 SetInternalReference(proxy, entry,
1094 "native_context", proxy->native_context(),
1095 JSGlobalProxy::kNativeContextOffset);
1096 }
1097
1098
1099 void V8HeapExplorer::ExtractJSObjectReferences(
1100 int entry, JSObject* js_obj) {
1101 HeapObject* obj = js_obj;
1102 ExtractPropertyReferences(js_obj, entry);
1103 ExtractElementReferences(js_obj, entry);
1104 ExtractInternalReferences(js_obj, entry);
1105 PrototypeIterator iter(heap_->isolate(), js_obj);
1106 SetPropertyReference(obj, entry, heap_->proto_string(), iter.GetCurrent());
1107 if (obj->IsJSBoundFunction()) {
1108 JSBoundFunction* js_fun = JSBoundFunction::cast(obj);
1109 TagObject(js_fun->bound_arguments(), "(bound arguments)");
1110 SetInternalReference(js_fun, entry, "bindings", js_fun->bound_arguments(),
1111 JSBoundFunction::kBoundArgumentsOffset);
1112 TagObject(js_fun->creation_context(), "(creation context)");
1113 SetInternalReference(js_fun, entry, "creation_context",
1114 js_fun->creation_context(),
1115 JSBoundFunction::kCreationContextOffset);
1116 SetNativeBindReference(js_obj, entry, "bound_this", js_fun->bound_this());
1117 SetNativeBindReference(js_obj, entry, "bound_function",
1118 js_fun->bound_target_function());
1119 FixedArray* bindings = js_fun->bound_arguments();
1120 for (int i = 0; i < bindings->length(); i++) {
1121 const char* reference_name = names_->GetFormatted("bound_argument_%d", i);
1122 SetNativeBindReference(js_obj, entry, reference_name, bindings->get(i));
1123 }
1124 } else if (obj->IsJSFunction()) {
1125 JSFunction* js_fun = JSFunction::cast(js_obj);
1126 Object* proto_or_map = js_fun->prototype_or_initial_map();
1127 if (!proto_or_map->IsTheHole()) {
1128 if (!proto_or_map->IsMap()) {
1129 SetPropertyReference(
1130 obj, entry,
1131 heap_->prototype_string(), proto_or_map,
1132 NULL,
1133 JSFunction::kPrototypeOrInitialMapOffset);
1134 } else {
1135 SetPropertyReference(
1136 obj, entry,
1137 heap_->prototype_string(), js_fun->prototype());
1138 SetInternalReference(
1139 obj, entry, "initial_map", proto_or_map,
1140 JSFunction::kPrototypeOrInitialMapOffset);
1141 }
1142 }
1143 SharedFunctionInfo* shared_info = js_fun->shared();
1144 TagObject(js_fun->literals(), "(function literals)");
1145 SetInternalReference(js_fun, entry, "literals", js_fun->literals(),
1146 JSFunction::kLiteralsOffset);
1147 TagObject(shared_info, "(shared function info)");
1148 SetInternalReference(js_fun, entry,
1149 "shared", shared_info,
1150 JSFunction::kSharedFunctionInfoOffset);
1151 TagObject(js_fun->context(), "(context)");
1152 SetInternalReference(js_fun, entry,
1153 "context", js_fun->context(),
1154 JSFunction::kContextOffset);
1155 SetWeakReference(js_fun, entry,
1156 "next_function_link", js_fun->next_function_link(),
1157 JSFunction::kNextFunctionLinkOffset);
1158 // Ensure no new weak references appeared in JSFunction.
1159 STATIC_ASSERT(JSFunction::kCodeEntryOffset ==
1160 JSFunction::kNonWeakFieldsEndOffset);
1161 STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
1162 JSFunction::kNextFunctionLinkOffset);
1163 STATIC_ASSERT(JSFunction::kNextFunctionLinkOffset + kPointerSize
1164 == JSFunction::kSize);
1165 } else if (obj->IsJSGlobalObject()) {
1166 JSGlobalObject* global_obj = JSGlobalObject::cast(obj);
1167 SetInternalReference(global_obj, entry, "native_context",
1168 global_obj->native_context(),
1169 JSGlobalObject::kNativeContextOffset);
1170 SetInternalReference(global_obj, entry, "global_proxy",
1171 global_obj->global_proxy(),
1172 JSGlobalObject::kGlobalProxyOffset);
1173 STATIC_ASSERT(JSGlobalObject::kSize - JSObject::kHeaderSize ==
1174 2 * kPointerSize);
1175 } else if (obj->IsJSArrayBufferView()) {
1176 JSArrayBufferView* view = JSArrayBufferView::cast(obj);
1177 SetInternalReference(view, entry, "buffer", view->buffer(),
1178 JSArrayBufferView::kBufferOffset);
1179 }
1180 TagObject(js_obj->properties(), "(object properties)");
1181 SetInternalReference(obj, entry,
1182 "properties", js_obj->properties(),
1183 JSObject::kPropertiesOffset);
1184 TagObject(js_obj->elements(), "(object elements)");
1185 SetInternalReference(obj, entry,
1186 "elements", js_obj->elements(),
1187 JSObject::kElementsOffset);
1188 }
1189
1190
1191 void V8HeapExplorer::ExtractStringReferences(int entry, String* string) {
1192 if (string->IsConsString()) {
1193 ConsString* cs = ConsString::cast(string);
1194 SetInternalReference(cs, entry, "first", cs->first(),
1195 ConsString::kFirstOffset);
1196 SetInternalReference(cs, entry, "second", cs->second(),
1197 ConsString::kSecondOffset);
1198 } else if (string->IsSlicedString()) {
1199 SlicedString* ss = SlicedString::cast(string);
1200 SetInternalReference(ss, entry, "parent", ss->parent(),
1201 SlicedString::kParentOffset);
1202 }
1203 }
1204
1205
1206 void V8HeapExplorer::ExtractSymbolReferences(int entry, Symbol* symbol) {
1207 SetInternalReference(symbol, entry,
1208 "name", symbol->name(),
1209 Symbol::kNameOffset);
1210 }
1211
1212
1213 void V8HeapExplorer::ExtractJSCollectionReferences(int entry,
1214 JSCollection* collection) {
1215 SetInternalReference(collection, entry, "table", collection->table(),
1216 JSCollection::kTableOffset);
1217 }
1218
1219
1220 void V8HeapExplorer::ExtractJSWeakCollectionReferences(
1221 int entry, JSWeakCollection* collection) {
1222 MarkAsWeakContainer(collection->table());
1223 SetInternalReference(collection, entry,
1224 "table", collection->table(),
1225 JSWeakCollection::kTableOffset);
1226 }
1227
1228
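// Emits context-variable references for the context's own locals (resolved
// through its ScopeInfo) and internal or weak references for the fixed
// context slots; for native contexts the embedded caches and weak lists are
// tagged as well.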
1229 void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) {
1230 if (context == context->declaration_context()) {
1231 ScopeInfo* scope_info = context->closure()->shared()->scope_info();
1232 // Add context allocated locals.
1233 int context_locals = scope_info->ContextLocalCount();
1234 for (int i = 0; i < context_locals; ++i) {
1235 String* local_name = scope_info->ContextLocalName(i);
1236 int idx = Context::MIN_CONTEXT_SLOTS + i;
1237 SetContextReference(context, entry, local_name, context->get(idx),
1238 Context::OffsetOfElementAt(idx));
1239 }
1240 if (scope_info->HasFunctionName()) {
1241 String* name = scope_info->FunctionName();
1242 VariableMode mode;
1243 int idx = scope_info->FunctionContextSlotIndex(name, &mode);
1244 if (idx >= 0) {
1245 SetContextReference(context, entry, name, context->get(idx),
1246 Context::OffsetOfElementAt(idx));
1247 }
1248 }
1249 }
1250
1251 #define EXTRACT_CONTEXT_FIELD(index, type, name) \
1252 if (Context::index < Context::FIRST_WEAK_SLOT || \
1253 Context::index == Context::MAP_CACHE_INDEX) { \
1254 SetInternalReference(context, entry, #name, context->get(Context::index), \
1255 FixedArray::OffsetOfElementAt(Context::index)); \
1256 } else { \
1257 SetWeakReference(context, entry, #name, context->get(Context::index), \
1258 FixedArray::OffsetOfElementAt(Context::index)); \
1259 }
1260 EXTRACT_CONTEXT_FIELD(CLOSURE_INDEX, JSFunction, closure);
1261 EXTRACT_CONTEXT_FIELD(PREVIOUS_INDEX, Context, previous);
1262 EXTRACT_CONTEXT_FIELD(EXTENSION_INDEX, HeapObject, extension);
1263 EXTRACT_CONTEXT_FIELD(NATIVE_CONTEXT_INDEX, Context, native_context);
1264 if (context->IsNativeContext()) {
1265 TagObject(context->normalized_map_cache(), "(context norm. map cache)");
1266 TagObject(context->embedder_data(), "(context data)");
1267 NATIVE_CONTEXT_FIELDS(EXTRACT_CONTEXT_FIELD)
1268 EXTRACT_CONTEXT_FIELD(OPTIMIZED_FUNCTIONS_LIST, unused,
1269 optimized_functions_list);
1270 EXTRACT_CONTEXT_FIELD(OPTIMIZED_CODE_LIST, unused, optimized_code_list);
1271 EXTRACT_CONTEXT_FIELD(DEOPTIMIZED_CODE_LIST, unused, deoptimized_code_list);
1272 EXTRACT_CONTEXT_FIELD(NEXT_CONTEXT_LINK, unused, next_context_link);
1273 #undef EXTRACT_CONTEXT_FIELD
1274 STATIC_ASSERT(Context::OPTIMIZED_FUNCTIONS_LIST ==
1275 Context::FIRST_WEAK_SLOT);
1276 STATIC_ASSERT(Context::NEXT_CONTEXT_LINK + 1 ==
1277 Context::NATIVE_CONTEXT_SLOTS);
1278 STATIC_ASSERT(Context::FIRST_WEAK_SLOT + 4 ==
1279 Context::NATIVE_CONTEXT_SLOTS);
1280 }
1281 }
1282
1283
1284 void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
1285 Object* raw_transitions_or_prototype_info = map->raw_transitions();
1286 if (TransitionArray::IsFullTransitionArray(
1287 raw_transitions_or_prototype_info)) {
1288 TransitionArray* transitions =
1289 TransitionArray::cast(raw_transitions_or_prototype_info);
1290 int transitions_entry = GetEntry(transitions)->index();
1291
1292 if (map->CanTransition()) {
1293 if (transitions->HasPrototypeTransitions()) {
1294 FixedArray* prototype_transitions =
1295 transitions->GetPrototypeTransitions();
1296 MarkAsWeakContainer(prototype_transitions);
1297 TagObject(prototype_transitions, "(prototype transitions)");
1298 SetInternalReference(transitions, transitions_entry,
1299 "prototype_transitions", prototype_transitions);
1300 }
1301 // TODO(alph): transitions keys are strong links.
1302 MarkAsWeakContainer(transitions);
1303 }
1304
1305 TagObject(transitions, "(transition array)");
1306 SetInternalReference(map, entry, "transitions", transitions,
1307 Map::kTransitionsOrPrototypeInfoOffset);
1308 } else if (TransitionArray::IsSimpleTransition(
1309 raw_transitions_or_prototype_info)) {
1310 TagObject(raw_transitions_or_prototype_info, "(transition)");
1311 SetInternalReference(map, entry, "transition",
1312 raw_transitions_or_prototype_info,
1313 Map::kTransitionsOrPrototypeInfoOffset);
1314 } else if (map->is_prototype_map()) {
1315 TagObject(raw_transitions_or_prototype_info, "prototype_info");
1316 SetInternalReference(map, entry, "prototype_info",
1317 raw_transitions_or_prototype_info,
1318 Map::kTransitionsOrPrototypeInfoOffset);
1319 }
1320 DescriptorArray* descriptors = map->instance_descriptors();
1321 TagObject(descriptors, "(map descriptors)");
1322 SetInternalReference(map, entry,
1323 "descriptors", descriptors,
1324 Map::kDescriptorsOffset);
1325
1326 MarkAsWeakContainer(map->code_cache());
1327 SetInternalReference(map, entry,
1328 "code_cache", map->code_cache(),
1329 Map::kCodeCacheOffset);
1330 SetInternalReference(map, entry,
1331 "prototype", map->prototype(), Map::kPrototypeOffset);
1332 Object* constructor_or_backpointer = map->constructor_or_backpointer();
1333 if (constructor_or_backpointer->IsMap()) {
1334 TagObject(constructor_or_backpointer, "(back pointer)");
1335 SetInternalReference(map, entry, "back_pointer", constructor_or_backpointer,
1336 Map::kConstructorOrBackPointerOffset);
1337 } else {
1338 SetInternalReference(map, entry, "constructor", constructor_or_backpointer,
1339 Map::kConstructorOrBackPointerOffset);
1340 }
1341 TagObject(map->dependent_code(), "(dependent code)");
1342 MarkAsWeakContainer(map->dependent_code());
1343 SetInternalReference(map, entry,
1344 "dependent_code", map->dependent_code(),
1345 Map::kDependentCodeOffset);
1346 }
1347
1348
1349 void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
1350 int entry, SharedFunctionInfo* shared) {
1351 HeapObject* obj = shared;
1352 String* shared_name = shared->DebugName();
1353 const char* name = NULL;
1354 if (shared_name != *heap_->isolate()->factory()->empty_string()) {
1355 name = names_->GetName(shared_name);
1356 TagObject(shared->code(), names_->GetFormatted("(code for %s)", name));
1357 } else {
1358 TagObject(shared->code(), names_->GetFormatted("(%s code)",
1359 Code::Kind2String(shared->code()->kind())));
1360 }
1361
1362 SetInternalReference(obj, entry,
1363 "name", shared->name(),
1364 SharedFunctionInfo::kNameOffset);
1365 SetInternalReference(obj, entry,
1366 "code", shared->code(),
1367 SharedFunctionInfo::kCodeOffset);
1368 TagObject(shared->scope_info(), "(function scope info)");
1369 SetInternalReference(obj, entry,
1370 "scope_info", shared->scope_info(),
1371 SharedFunctionInfo::kScopeInfoOffset);
1372 SetInternalReference(obj, entry,
1373 "instance_class_name", shared->instance_class_name(),
1374 SharedFunctionInfo::kInstanceClassNameOffset);
1375 SetInternalReference(obj, entry,
1376 "script", shared->script(),
1377 SharedFunctionInfo::kScriptOffset);
1378 const char* construct_stub_name = name ?
1379 names_->GetFormatted("(construct stub code for %s)", name) :
1380 "(construct stub code)";
1381 TagObject(shared->construct_stub(), construct_stub_name);
1382 SetInternalReference(obj, entry,
1383 "construct_stub", shared->construct_stub(),
1384 SharedFunctionInfo::kConstructStubOffset);
1385 SetInternalReference(obj, entry,
1386 "function_data", shared->function_data(),
1387 SharedFunctionInfo::kFunctionDataOffset);
1388 SetInternalReference(obj, entry,
1389 "debug_info", shared->debug_info(),
1390 SharedFunctionInfo::kDebugInfoOffset);
1391 SetInternalReference(obj, entry,
1392 "inferred_name", shared->inferred_name(),
1393 SharedFunctionInfo::kInferredNameOffset);
1394 SetInternalReference(obj, entry,
1395 "optimized_code_map", shared->optimized_code_map(),
1396 SharedFunctionInfo::kOptimizedCodeMapOffset);
1397 SetInternalReference(obj, entry,
1398 "feedback_vector", shared->feedback_vector(),
1399 SharedFunctionInfo::kFeedbackVectorOffset);
1400 }
1401
1402
1403 void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) {
1404 HeapObject* obj = script;
1405 SetInternalReference(obj, entry,
1406 "source", script->source(),
1407 Script::kSourceOffset);
1408 SetInternalReference(obj, entry,
1409 "name", script->name(),
1410 Script::kNameOffset);
1411 SetInternalReference(obj, entry,
1412 "context_data", script->context_data(),
1413 Script::kContextOffset);
1414 TagObject(script->line_ends(), "(script line ends)");
1415 SetInternalReference(obj, entry,
1416 "line_ends", script->line_ends(),
1417 Script::kLineEndsOffset);
1418 }
1419
1420
1421 void V8HeapExplorer::ExtractAccessorInfoReferences(
1422 int entry, AccessorInfo* accessor_info) {
1423 SetInternalReference(accessor_info, entry, "name", accessor_info->name(),
1424 AccessorInfo::kNameOffset);
1425 SetInternalReference(accessor_info, entry, "expected_receiver_type",
1426 accessor_info->expected_receiver_type(),
1427 AccessorInfo::kExpectedReceiverTypeOffset);
1428 if (accessor_info->IsExecutableAccessorInfo()) {
1429 ExecutableAccessorInfo* executable_accessor_info =
1430 ExecutableAccessorInfo::cast(accessor_info);
1431 SetInternalReference(executable_accessor_info, entry, "getter",
1432 executable_accessor_info->getter(),
1433 ExecutableAccessorInfo::kGetterOffset);
1434 SetInternalReference(executable_accessor_info, entry, "setter",
1435 executable_accessor_info->setter(),
1436 ExecutableAccessorInfo::kSetterOffset);
1437 SetInternalReference(executable_accessor_info, entry, "data",
1438 executable_accessor_info->data(),
1439 ExecutableAccessorInfo::kDataOffset);
1440 }
1441 }
1442
1443
1444 void V8HeapExplorer::ExtractAccessorPairReferences(
1445 int entry, AccessorPair* accessors) {
1446 SetInternalReference(accessors, entry, "getter", accessors->getter(),
1447 AccessorPair::kGetterOffset);
1448 SetInternalReference(accessors, entry, "setter", accessors->setter(),
1449 AccessorPair::kSetterOffset);
1450 }
1451
1452
1453 void V8HeapExplorer::ExtractCodeCacheReferences(
1454 int entry, CodeCache* code_cache) {
1455 TagObject(code_cache->default_cache(), "(default code cache)");
1456 SetInternalReference(code_cache, entry,
1457 "default_cache", code_cache->default_cache(),
1458 CodeCache::kDefaultCacheOffset);
1459 TagObject(code_cache->normal_type_cache(), "(code type cache)");
1460 SetInternalReference(code_cache, entry,
1461 "type_cache", code_cache->normal_type_cache(),
1462 CodeCache::kNormalTypeCacheOffset);
1463 }
1464
1465
1466 void V8HeapExplorer::TagBuiltinCodeObject(Code* code, const char* name) {
1467 TagObject(code, names_->GetFormatted("(%s builtin)", name));
1468 }
1469
1470
1471 void V8HeapExplorer::TagCodeObject(Code* code) {
1472 if (code->kind() == Code::STUB) {
1473 TagObject(code, names_->GetFormatted(
1474 "(%s code)",
1475 CodeStub::MajorName(CodeStub::GetMajorKey(code))));
1476 }
1477 }
1478
1479
1480 void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) {
1481 TagCodeObject(code);
1482 TagObject(code->relocation_info(), "(code relocation info)");
1483 SetInternalReference(code, entry,
1484 "relocation_info", code->relocation_info(),
1485 Code::kRelocationInfoOffset);
1486 SetInternalReference(code, entry,
1487 "handler_table", code->handler_table(),
1488 Code::kHandlerTableOffset);
1489 TagObject(code->deoptimization_data(), "(code deopt data)");
1490 SetInternalReference(code, entry,
1491 "deoptimization_data", code->deoptimization_data(),
1492 Code::kDeoptimizationDataOffset);
1493 if (code->kind() == Code::FUNCTION) {
1494 SetInternalReference(code, entry,
1495 "type_feedback_info", code->type_feedback_info(),
1496 Code::kTypeFeedbackInfoOffset);
1497 }
1498 SetInternalReference(code, entry,
1499 "gc_metadata", code->gc_metadata(),
1500 Code::kGCMetadataOffset);
1501 if (code->kind() == Code::OPTIMIZED_FUNCTION) {
1502 SetWeakReference(code, entry,
1503 "next_code_link", code->next_code_link(),
1504 Code::kNextCodeLinkOffset);
1505 }
1506 }
1507
1508
1509 void V8HeapExplorer::ExtractBoxReferences(int entry, Box* box) {
1510 SetInternalReference(box, entry, "value", box->value(), Box::kValueOffset);
1511 }
1512
1513
1514 void V8HeapExplorer::ExtractCellReferences(int entry, Cell* cell) {
1515 SetInternalReference(cell, entry, "value", cell->value(), Cell::kValueOffset);
1516 }
1517
1518
1519 void V8HeapExplorer::ExtractPropertyCellReferences(int entry,
1520 PropertyCell* cell) {
1521 SetInternalReference(cell, entry, "value", cell->value(),
1522 PropertyCell::kValueOffset);
1523 MarkAsWeakContainer(cell->dependent_code());
1524 SetInternalReference(cell, entry, "dependent_code", cell->dependent_code(),
1525 PropertyCell::kDependentCodeOffset);
1526 }
1527
1528
1529 void V8HeapExplorer::ExtractAllocationSiteReferences(int entry,
1530 AllocationSite* site) {
1531 SetInternalReference(site, entry, "transition_info", site->transition_info(),
1532 AllocationSite::kTransitionInfoOffset);
1533 SetInternalReference(site, entry, "nested_site", site->nested_site(),
1534 AllocationSite::kNestedSiteOffset);
1535 MarkAsWeakContainer(site->dependent_code());
1536 SetInternalReference(site, entry, "dependent_code", site->dependent_code(),
1537 AllocationSite::kDependentCodeOffset);
1538 // Do not visit weak_next: it is not visited by the StaticVisitor,
1539 // and the weak_next field is of no interest for the snapshot.
1540 STATIC_ASSERT(AllocationSite::kWeakNextOffset >=
1541 AllocationSite::BodyDescriptor::kEndOffset);
1542 }
1543
1544
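// Allocator for the synthetic entry that stands in for the off-heap backing
// store of a JSArrayBuffer. The entry is a kNative node named
// "system / JSArrayBufferData" whose self size is the buffer's byte length
// (see ExtractJSArrayBufferReferences below).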
1545 class JSArrayBufferDataEntryAllocator : public HeapEntriesAllocator {
1546 public:
1547 JSArrayBufferDataEntryAllocator(size_t size, V8HeapExplorer* explorer)
1548 : size_(size)
1549 , explorer_(explorer) {
1550 }
1551 virtual HeapEntry* AllocateEntry(HeapThing ptr) {
1552 return explorer_->AddEntry(
1553 static_cast<Address>(ptr),
1554 HeapEntry::kNative, "system / JSArrayBufferData", size_);
1555 }
1556 private:
1557 size_t size_;
1558 V8HeapExplorer* explorer_;
1559 };
1560
1561
1562 void V8HeapExplorer::ExtractJSArrayBufferReferences(
1563 int entry, JSArrayBuffer* buffer) {
1564 // Set up a reference to the native memory backing_store object.
1565 if (!buffer->backing_store())
1566 return;
1567 size_t data_size = NumberToSize(heap_->isolate(), buffer->byte_length());
1568 JSArrayBufferDataEntryAllocator allocator(data_size, this);
1569 HeapEntry* data_entry =
1570 filler_->FindOrAddEntry(buffer->backing_store(), &allocator);
1571 filler_->SetNamedReference(HeapGraphEdge::kInternal,
1572 entry, "backing_store", data_entry);
1573 }
1574
1575
1576 void V8HeapExplorer::ExtractFixedArrayReferences(int entry, FixedArray* array) {
1577 bool is_weak = weak_containers_.Contains(array);
1578 for (int i = 0, l = array->length(); i < l; ++i) {
1579 if (is_weak) {
1580 SetWeakReference(array, entry,
1581 i, array->get(i), array->OffsetOfElementAt(i));
1582 } else {
1583 SetInternalReference(array, entry,
1584 i, array->get(i), array->OffsetOfElementAt(i));
1585 }
1586 }
1587 }
1588
1589
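// Extracts named properties from whichever backing store the object uses:
// the map's descriptor array for fast-mode objects, the global dictionary
// for JSGlobalObjects, and the name dictionary for other slow-mode objects.
// Values keyed by the hidden string are tagged as "(hidden properties)"
// instead of being reported as regular properties.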
1590 void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
1591 if (js_obj->HasFastProperties()) {
1592 DescriptorArray* descs = js_obj->map()->instance_descriptors();
1593 int real_size = js_obj->map()->NumberOfOwnDescriptors();
1594 for (int i = 0; i < real_size; i++) {
1595 PropertyDetails details = descs->GetDetails(i);
1596 switch (details.location()) {
1597 case kField: {
1598 Representation r = details.representation();
1599 if (r.IsSmi() || r.IsDouble()) break;
1600
1601 Name* k = descs->GetKey(i);
1602 FieldIndex field_index = FieldIndex::ForDescriptor(js_obj->map(), i);
1603 Object* value = js_obj->RawFastPropertyAt(field_index);
1604 int field_offset =
1605 field_index.is_inobject() ? field_index.offset() : -1;
1606
1607 if (k != heap_->hidden_string()) {
1608 SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry, k,
1609 value, NULL, field_offset);
1610 } else {
1611 TagObject(value, "(hidden properties)");
1612 SetInternalReference(js_obj, entry, "hidden_properties", value,
1613 field_offset);
1614 }
1615 break;
1616 }
1617 case kDescriptor:
1618 SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry,
1619 descs->GetKey(i),
1620 descs->GetValue(i));
1621 break;
1622 }
1623 }
1624 } else if (js_obj->IsJSGlobalObject()) {
1625 // We assume that global objects can only have slow properties.
1626 GlobalDictionary* dictionary = js_obj->global_dictionary();
1627 int length = dictionary->Capacity();
1628 for (int i = 0; i < length; ++i) {
1629 Object* k = dictionary->KeyAt(i);
1630 if (dictionary->IsKey(k)) {
1631 DCHECK(dictionary->ValueAt(i)->IsPropertyCell());
1632 PropertyCell* cell = PropertyCell::cast(dictionary->ValueAt(i));
1633 Object* value = cell->value();
1634 if (k == heap_->hidden_string()) {
1635 TagObject(value, "(hidden properties)");
1636 SetInternalReference(js_obj, entry, "hidden_properties", value);
1637 continue;
1638 }
1639 PropertyDetails details = cell->property_details();
1640 SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry,
1641 Name::cast(k), value);
1642 }
1643 }
1644 } else {
1645 NameDictionary* dictionary = js_obj->property_dictionary();
1646 int length = dictionary->Capacity();
1647 for (int i = 0; i < length; ++i) {
1648 Object* k = dictionary->KeyAt(i);
1649 if (dictionary->IsKey(k)) {
1650 Object* value = dictionary->ValueAt(i);
1651 if (k == heap_->hidden_string()) {
1652 TagObject(value, "(hidden properties)");
1653 SetInternalReference(js_obj, entry, "hidden_properties", value);
1654 continue;
1655 }
1656 PropertyDetails details = dictionary->DetailsAt(i);
1657 SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry,
1658 Name::cast(k), value);
1659 }
1660 }
1661 }
1662 }
1663
1664
1665 void V8HeapExplorer::ExtractAccessorPairProperty(JSObject* js_obj, int entry,
1666 Name* key,
1667 Object* callback_obj,
1668 int field_offset) {
1669 if (!callback_obj->IsAccessorPair()) return;
1670 AccessorPair* accessors = AccessorPair::cast(callback_obj);
1671 SetPropertyReference(js_obj, entry, key, accessors, NULL, field_offset);
1672 Object* getter = accessors->getter();
1673 if (!getter->IsOddball()) {
1674 SetPropertyReference(js_obj, entry, key, getter, "get %s");
1675 }
1676 Object* setter = accessors->setter();
1677 if (!setter->IsOddball()) {
1678 SetPropertyReference(js_obj, entry, key, setter, "set %s");
1679 }
1680 }
1681
1682
1683 void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) {
1684 if (js_obj->HasFastObjectElements()) {
1685 FixedArray* elements = FixedArray::cast(js_obj->elements());
1686 int length = js_obj->IsJSArray() ?
1687 Smi::cast(JSArray::cast(js_obj)->length())->value() :
1688 elements->length();
1689 for (int i = 0; i < length; ++i) {
1690 if (!elements->get(i)->IsTheHole()) {
1691 SetElementReference(js_obj, entry, i, elements->get(i));
1692 }
1693 }
1694 } else if (js_obj->HasDictionaryElements()) {
1695 SeededNumberDictionary* dictionary = js_obj->element_dictionary();
1696 int length = dictionary->Capacity();
1697 for (int i = 0; i < length; ++i) {
1698 Object* k = dictionary->KeyAt(i);
1699 if (dictionary->IsKey(k)) {
1700 DCHECK(k->IsNumber());
1701 uint32_t index = static_cast<uint32_t>(k->Number());
1702 SetElementReference(js_obj, entry, index, dictionary->ValueAt(i));
1703 }
1704 }
1705 }
1706 }
1707
1708
1709 void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) {
1710 int length = js_obj->GetInternalFieldCount();
1711 for (int i = 0; i < length; ++i) {
1712 Object* o = js_obj->GetInternalField(i);
1713 SetInternalReference(
1714 js_obj, entry, i, o, js_obj->GetInternalFieldOffset(i));
1715 }
1716 }
1717
1718
1719 String* V8HeapExplorer::GetConstructorName(JSObject* object) {
1720 Isolate* isolate = object->GetIsolate();
1721 if (object->IsJSFunction()) return isolate->heap()->closure_string();
1722 DisallowHeapAllocation no_gc;
1723 HandleScope scope(isolate);
1724 return *JSReceiver::GetConstructorName(handle(object, isolate));
1725 }
1726
1727
1728 HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
1729 if (!obj->IsHeapObject()) return NULL;
1730 return filler_->FindOrAddEntry(obj, this);
1731 }
1732
1733
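// Visits the roots twice: a VISIT_ONLY_STRONG pass records only the strong
// references, and a following VISIT_ALL pass records every reference together
// with its SyncTag. FillReferences then walks both lists in parallel to mark
// each GC subroot reference as strong or weak and to tag builtin code objects
// with their builtin names.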
1734 class RootsReferencesExtractor : public ObjectVisitor {
1735 private:
1736 struct IndexTag {
1737 IndexTag(int index, VisitorSynchronization::SyncTag tag)
1738 : index(index), tag(tag) { }
1739 int index;
1740 VisitorSynchronization::SyncTag tag;
1741 };
1742
1743 public:
1744 explicit RootsReferencesExtractor(Heap* heap)
1745 : collecting_all_references_(false),
1746 previous_reference_count_(0),
1747 heap_(heap) {
1748 }
1749
1750 void VisitPointers(Object** start, Object** end) override {
1751 if (collecting_all_references_) {
1752 for (Object** p = start; p < end; p++) all_references_.Add(*p);
1753 } else {
1754 for (Object** p = start; p < end; p++) strong_references_.Add(*p);
1755 }
1756 }
1757
1758 void SetCollectingAllReferences() { collecting_all_references_ = true; }
1759
1760 void FillReferences(V8HeapExplorer* explorer) {
1761 DCHECK(strong_references_.length() <= all_references_.length());
1762 Builtins* builtins = heap_->isolate()->builtins();
1763 int strong_index = 0, all_index = 0, tags_index = 0, builtin_index = 0;
1764 while (all_index < all_references_.length()) {
1765 bool is_strong = strong_index < strong_references_.length()
1766 && strong_references_[strong_index] == all_references_[all_index];
1767 explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
1768 !is_strong,
1769 all_references_[all_index]);
1770 if (reference_tags_[tags_index].tag ==
1771 VisitorSynchronization::kBuiltins) {
1772 DCHECK(all_references_[all_index]->IsCode());
1773 explorer->TagBuiltinCodeObject(
1774 Code::cast(all_references_[all_index]),
1775 builtins->name(builtin_index++));
1776 }
1777 ++all_index;
1778 if (is_strong) ++strong_index;
1779 if (reference_tags_[tags_index].index == all_index) ++tags_index;
1780 }
1781 }
1782
1783 void Synchronize(VisitorSynchronization::SyncTag tag) override {
1784 if (collecting_all_references_ &&
1785 previous_reference_count_ != all_references_.length()) {
1786 previous_reference_count_ = all_references_.length();
1787 reference_tags_.Add(IndexTag(previous_reference_count_, tag));
1788 }
1789 }
1790
1791 private:
1792 bool collecting_all_references_;
1793 List<Object*> strong_references_;
1794 List<Object*> all_references_;
1795 int previous_reference_count_;
1796 List<IndexTag> reference_tags_;
1797 Heap* heap_;
1798 };
1799
1800
1801 bool V8HeapExplorer::IterateAndExtractReferences(
1802 SnapshotFiller* filler) {
1803 filler_ = filler;
1804
1805 // Create references to the synthetic roots.
1806 SetRootGcRootsReference();
1807 for (int tag = 0; tag < VisitorSynchronization::kNumberOfSyncTags; tag++) {
1808 SetGcRootsReference(static_cast<VisitorSynchronization::SyncTag>(tag));
1809 }
1810
1811 // Make sure builtin code objects get their builtin tags
1812 // first. Otherwise a particular JSFunction object could set
1813 // its custom name to a generic builtin.
1814 RootsReferencesExtractor extractor(heap_);
1815 heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG);
1816 extractor.SetCollectingAllReferences();
1817 heap_->IterateRoots(&extractor, VISIT_ALL);
1818 extractor.FillReferences(this);
1819
1820 // We have to do two passes as sometimes FixedArrays are used
1821 // to weakly hold their items, and it's impossible to distinguish
1822 // between these cases without processing the array owner first.
1823 bool interrupted =
1824 IterateAndExtractSinglePass<&V8HeapExplorer::ExtractReferencesPass1>() ||
1825 IterateAndExtractSinglePass<&V8HeapExplorer::ExtractReferencesPass2>();
1826
1827 if (interrupted) {
1828 filler_ = NULL;
1829 return false;
1830 }
1831
1832 filler_ = NULL;
1833 return progress_->ProgressReport(true);
1834 }
1835
1836
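// Single extraction pass over the whole heap. For each object: find its
// entry, grow the marks_ bitmap (clearing it) when the object needs more
// bits, run the type-specific extractor, and, if the extractor handled the
// object, add the map reference and let IndexedReferencesExtractor report the
// remaining unvisited fields as hidden references.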
1837 template<V8HeapExplorer::ExtractReferencesMethod extractor>
1838 bool V8HeapExplorer::IterateAndExtractSinglePass() {
1839 // Now iterate the whole heap.
1840 bool interrupted = false;
1841 HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
1842 // Heap iteration with filtering must always run to completion, so keep
1842 // iterating even after an interruption has been requested.
1843 for (HeapObject* obj = iterator.next();
1844 obj != NULL;
1845 obj = iterator.next(), progress_->ProgressStep()) {
1846 if (interrupted) continue;
1847
1848 size_t max_pointer = obj->Size() / kPointerSize;
1849 if (max_pointer > marks_.size()) {
1850 // Clear the current bits.
1851 std::vector<bool>().swap(marks_);
1852 // Reallocate to the right size.
1853 marks_.resize(max_pointer, false);
1854 }
1855
1856 HeapEntry* heap_entry = GetEntry(obj);
1857 int entry = heap_entry->index();
1858 if ((this->*extractor)(entry, obj)) {
1859 SetInternalReference(obj, entry,
1860 "map", obj->map(), HeapObject::kMapOffset);
1861 // Extract unvisited fields as hidden references and restore tags
1862 // of visited fields.
1863 IndexedReferencesExtractor refs_extractor(this, obj, entry);
1864 obj->Iterate(&refs_extractor);
1865 }
1866
1867 if (!progress_->ProgressReport(false)) interrupted = true;
1868 }
1869 return interrupted;
1870 }
1871
1872
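// Filters out objects that would only add noise to the graph: oddballs and
// the canonical empty arrays and well-known maps that almost every object
// points to.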
1873 bool V8HeapExplorer::IsEssentialObject(Object* object) {
1874 return object->IsHeapObject() && !object->IsOddball() &&
1875 object != heap_->empty_byte_array() &&
1876 object != heap_->empty_bytecode_array() &&
1877 object != heap_->empty_fixed_array() &&
1878 object != heap_->empty_descriptor_array() &&
1879 object != heap_->fixed_array_map() && object != heap_->cell_map() &&
1880 object != heap_->global_property_cell_map() &&
1881 object != heap_->shared_function_info_map() &&
1882 object != heap_->free_space_map() &&
1883 object != heap_->one_pointer_filler_map() &&
1884 object != heap_->two_pointer_filler_map();
1885 }
1886
1887
1888 void V8HeapExplorer::SetContextReference(HeapObject* parent_obj,
1889 int parent_entry,
1890 String* reference_name,
1891 Object* child_obj,
1892 int field_offset) {
1893 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1894 HeapEntry* child_entry = GetEntry(child_obj);
1895 if (child_entry != NULL) {
1896 filler_->SetNamedReference(HeapGraphEdge::kContextVariable,
1897 parent_entry,
1898 names_->GetName(reference_name),
1899 child_entry);
1900 MarkVisitedField(parent_obj, field_offset);
1901 }
1902 }
1903
1904
1905 void V8HeapExplorer::MarkVisitedField(HeapObject* obj, int offset) {
1906 if (offset < 0) return;
1907 int index = offset / kPointerSize;
1908 DCHECK(!marks_[index]);
1909 marks_[index] = true;
1910 }
1911
1912
1913 void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj,
1914 int parent_entry,
1915 const char* reference_name,
1916 Object* child_obj) {
1917 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1918 HeapEntry* child_entry = GetEntry(child_obj);
1919 if (child_entry != NULL) {
1920 filler_->SetNamedReference(HeapGraphEdge::kShortcut,
1921 parent_entry,
1922 reference_name,
1923 child_entry);
1924 }
1925 }
1926
1927
1928 void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
1929 int parent_entry,
1930 int index,
1931 Object* child_obj) {
1932 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1933 HeapEntry* child_entry = GetEntry(child_obj);
1934 if (child_entry != NULL) {
1935 filler_->SetIndexedReference(HeapGraphEdge::kElement,
1936 parent_entry,
1937 index,
1938 child_entry);
1939 }
1940 }
1941
1942
1943 void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
1944 int parent_entry,
1945 const char* reference_name,
1946 Object* child_obj,
1947 int field_offset) {
1948 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1949 HeapEntry* child_entry = GetEntry(child_obj);
1950 if (child_entry == NULL) return;
1951 if (IsEssentialObject(child_obj)) {
1952 filler_->SetNamedReference(HeapGraphEdge::kInternal,
1953 parent_entry,
1954 reference_name,
1955 child_entry);
1956 }
1957 MarkVisitedField(parent_obj, field_offset);
1958 }
1959
1960
1961 void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
1962 int parent_entry,
1963 int index,
1964 Object* child_obj,
1965 int field_offset) {
1966 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1967 HeapEntry* child_entry = GetEntry(child_obj);
1968 if (child_entry == NULL) return;
1969 if (IsEssentialObject(child_obj)) {
1970 filler_->SetNamedReference(HeapGraphEdge::kInternal,
1971 parent_entry,
1972 names_->GetName(index),
1973 child_entry);
1974 }
1975 MarkVisitedField(parent_obj, field_offset);
1976 }
1977
1978
1979 void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
1980 int parent_entry,
1981 int index,
1982 Object* child_obj) {
1983 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1984 HeapEntry* child_entry = GetEntry(child_obj);
1985 if (child_entry != NULL && IsEssentialObject(child_obj)) {
1986 filler_->SetIndexedReference(HeapGraphEdge::kHidden,
1987 parent_entry,
1988 index,
1989 child_entry);
1990 }
1991 }
1992
1993
1994 void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
1995 int parent_entry,
1996 const char* reference_name,
1997 Object* child_obj,
1998 int field_offset) {
1999 DCHECK(parent_entry == GetEntry(parent_obj)->index());
2000 HeapEntry* child_entry = GetEntry(child_obj);
2001 if (child_entry == NULL) return;
2002 if (IsEssentialObject(child_obj)) {
2003 filler_->SetNamedReference(HeapGraphEdge::kWeak,
2004 parent_entry,
2005 reference_name,
2006 child_entry);
2007 }
2008 MarkVisitedField(parent_obj, field_offset);
2009 }
2010
2011
2012 void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
2013 int parent_entry,
2014 int index,
2015 Object* child_obj,
2016 int field_offset) {
2017 DCHECK(parent_entry == GetEntry(parent_obj)->index());
2018 HeapEntry* child_entry = GetEntry(child_obj);
2019 if (child_entry == NULL) return;
2020 if (IsEssentialObject(child_obj)) {
2021 filler_->SetNamedReference(HeapGraphEdge::kWeak,
2022 parent_entry,
2023 names_->GetFormatted("%d", index),
2024 child_entry);
2025 }
2026 MarkVisitedField(parent_obj, field_offset);
2027 }
2028
2029
2030 void V8HeapExplorer::SetDataOrAccessorPropertyReference(
2031 PropertyKind kind, JSObject* parent_obj, int parent_entry,
2032 Name* reference_name, Object* child_obj, const char* name_format_string,
2033 int field_offset) {
2034 if (kind == kAccessor) {
2035 ExtractAccessorPairProperty(parent_obj, parent_entry, reference_name,
2036 child_obj, field_offset);
2037 } else {
2038 SetPropertyReference(parent_obj, parent_entry, reference_name, child_obj,
2039 name_format_string, field_offset);
2040 }
2041 }
2042
2043
2044 void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
2045 int parent_entry,
2046 Name* reference_name,
2047 Object* child_obj,
2048 const char* name_format_string,
2049 int field_offset) {
2050 DCHECK(parent_entry == GetEntry(parent_obj)->index());
2051 HeapEntry* child_entry = GetEntry(child_obj);
2052 if (child_entry != NULL) {
2053 HeapGraphEdge::Type type =
2054 reference_name->IsSymbol() || String::cast(reference_name)->length() > 0
2055 ? HeapGraphEdge::kProperty : HeapGraphEdge::kInternal;
2056 const char* name = name_format_string != NULL && reference_name->IsString()
2057 ? names_->GetFormatted(
2058 name_format_string,
2059 String::cast(reference_name)->ToCString(
2060 DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL).get()) :
2061 names_->GetName(reference_name);
2062
2063 filler_->SetNamedReference(type,
2064 parent_entry,
2065 name,
2066 child_entry);
2067 MarkVisitedField(parent_obj, field_offset);
2068 }
2069 }
2070
2071
2072 void V8HeapExplorer::SetRootGcRootsReference() {
2073 filler_->SetIndexedAutoIndexReference(
2074 HeapGraphEdge::kElement,
2075 snapshot_->root()->index(),
2076 snapshot_->gc_roots());
2077 }
2078
2079
2080 void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) {
2081 HeapEntry* child_entry = GetEntry(child_obj);
2082 DCHECK(child_entry != NULL);
2083 filler_->SetNamedAutoIndexReference(
2084 HeapGraphEdge::kShortcut,
2085 snapshot_->root()->index(),
2086 child_entry);
2087 }
2088
2089
2090 void V8HeapExplorer::SetGcRootsReference(VisitorSynchronization::SyncTag tag) {
2091 filler_->SetIndexedAutoIndexReference(
2092 HeapGraphEdge::kElement,
2093 snapshot_->gc_roots()->index(),
2094 snapshot_->gc_subroot(tag));
2095 }
2096
2097
2098 void V8HeapExplorer::SetGcSubrootReference(
2099 VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) {
2100 HeapEntry* child_entry = GetEntry(child_obj);
2101 if (child_entry != NULL) {
2102 const char* name = GetStrongGcSubrootName(child_obj);
2103 if (name != NULL) {
2104 filler_->SetNamedReference(
2105 HeapGraphEdge::kInternal,
2106 snapshot_->gc_subroot(tag)->index(),
2107 name,
2108 child_entry);
2109 } else {
2110 if (is_weak) {
2111 filler_->SetNamedAutoIndexReference(
2112 HeapGraphEdge::kWeak,
2113 snapshot_->gc_subroot(tag)->index(),
2114 child_entry);
2115 } else {
2116 filler_->SetIndexedAutoIndexReference(
2117 HeapGraphEdge::kElement,
2118 snapshot_->gc_subroot(tag)->index(),
2119 child_entry);
2120 }
2121 }
2122
2123 // Add a shortcut to the JS global object reference at the snapshot root.
2124 if (child_obj->IsNativeContext()) {
2125 Context* context = Context::cast(child_obj);
2126 JSGlobalObject* global = context->global_object();
2127 if (global->IsJSGlobalObject()) {
2128 bool is_debug_object =
2129 heap_->isolate()->debug()->IsDebugGlobal(global);
2130 if (!is_debug_object && !user_roots_.Contains(global)) {
2131 user_roots_.Insert(global);
2132 SetUserGlobalReference(global);
2133 }
2134 }
2135 }
2136 }
2137 }
2138
2139
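// Lazily builds a map from each strong root, struct map, internalized string
// and well-known symbol to its name; used to label references that originate
// from GC subroots.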
2140 const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
2141 if (strong_gc_subroot_names_.is_empty()) {
2142 #define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name);
2143 #define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name)
2144 STRONG_ROOT_LIST(ROOT_NAME)
2145 #undef ROOT_NAME
2146 #define STRUCT_MAP_NAME(NAME, Name, name) NAME_ENTRY(name##_map)
2147 STRUCT_LIST(STRUCT_MAP_NAME)
2148 #undef STRUCT_MAP_NAME
2149 #define STRING_NAME(name, str) NAME_ENTRY(name)
2150 INTERNALIZED_STRING_LIST(STRING_NAME)
2151 #undef STRING_NAME
2152 #define SYMBOL_NAME(name) NAME_ENTRY(name)
2153 PRIVATE_SYMBOL_LIST(SYMBOL_NAME)
2154 #undef SYMBOL_NAME
2155 #define SYMBOL_NAME(name, description) NAME_ENTRY(name)
2156 PUBLIC_SYMBOL_LIST(SYMBOL_NAME)
2157 WELL_KNOWN_SYMBOL_LIST(SYMBOL_NAME)
2158 #undef SYMBOL_NAME
2159 #undef NAME_ENTRY
2160 CHECK(!strong_gc_subroot_names_.is_empty());
2161 }
2162 return strong_gc_subroot_names_.GetTag(object);
2163 }
2164
2165
2166 void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
2167 if (IsEssentialObject(obj)) {
2168 HeapEntry* entry = GetEntry(obj);
2169 if (entry->name()[0] == '\0') {
2170 entry->set_name(tag);
2171 }
2172 }
2173 }
2174
2175
2176 void V8HeapExplorer::MarkAsWeakContainer(Object* object) {
2177 if (IsEssentialObject(object) && object->IsFixedArray()) {
2178 weak_containers_.Insert(object);
2179 }
2180 }
2181
2182
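// Collects the JSGlobalObject of every native context reachable from the
// global handles by following the context's global proxy to its prototype.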
2183 class GlobalObjectsEnumerator : public ObjectVisitor {
2184 public:
2185 void VisitPointers(Object** start, Object** end) override {
2186 for (Object** p = start; p < end; p++) {
2187 if ((*p)->IsNativeContext()) {
2188 Context* context = Context::cast(*p);
2189 JSObject* proxy = context->global_proxy();
2190 if (proxy->IsJSGlobalProxy()) {
2191 Object* global = proxy->map()->prototype();
2192 if (global->IsJSGlobalObject()) {
2193 objects_.Add(Handle<JSGlobalObject>(JSGlobalObject::cast(global)));
2194 }
2195 }
2196 }
2197 }
2198 }
2199 int count() { return objects_.length(); }
2200 Handle<JSGlobalObject>& at(int i) { return objects_[i]; }
2201
2202 private:
2203 List<Handle<JSGlobalObject> > objects_;
2204 };
2205
2206
2207 // Modifies heap. Must not be run during heap traversal.
2208 void V8HeapExplorer::TagGlobalObjects() {
2209 Isolate* isolate = heap_->isolate();
2210 HandleScope scope(isolate);
2211 GlobalObjectsEnumerator enumerator;
2212 isolate->global_handles()->IterateAllRoots(&enumerator);
2213 const char** urls = NewArray<const char*>(enumerator.count());
2214 for (int i = 0, l = enumerator.count(); i < l; ++i) {
2215 if (global_object_name_resolver_) {
2216 HandleScope scope(isolate);
2217 Handle<JSGlobalObject> global_obj = enumerator.at(i);
2218 urls[i] = global_object_name_resolver_->GetName(
2219 Utils::ToLocal(Handle<JSObject>::cast(global_obj)));
2220 } else {
2221 urls[i] = NULL;
2222 }
2223 }
2224
2225 DisallowHeapAllocation no_allocation;
2226 for (int i = 0, l = enumerator.count(); i < l; ++i) {
2227 objects_tags_.SetTag(*enumerator.at(i), urls[i]);
2228 }
2229
2230 DeleteArray(urls);
2231 }
2232
2233
2234 class GlobalHandlesExtractor : public ObjectVisitor {
2235 public:
2236 explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer)
2237 : explorer_(explorer) {}
2238 ~GlobalHandlesExtractor() override {}
2239 void VisitPointers(Object** start, Object** end) override { UNREACHABLE(); }
2240 void VisitEmbedderReference(Object** p, uint16_t class_id) override {
2241 explorer_->VisitSubtreeWrapper(p, class_id);
2242 }
2243 private:
2244 NativeObjectsExplorer* explorer_;
2245 };
2246
2247
2248 class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
2249 public:
2250 BasicHeapEntriesAllocator(
2251 HeapSnapshot* snapshot,
2252 HeapEntry::Type entries_type)
2253 : snapshot_(snapshot),
2254 names_(snapshot_->profiler()->names()),
2255 heap_object_map_(snapshot_->profiler()->heap_object_map()),
2256 entries_type_(entries_type) {
2257 }
2258 virtual HeapEntry* AllocateEntry(HeapThing ptr);
2259 private:
2260 HeapSnapshot* snapshot_;
2261 StringsStorage* names_;
2262 HeapObjectsMap* heap_object_map_;
2263 HeapEntry::Type entries_type_;
2264 };
2265
2266
2267 HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) {
2268 v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
2269 intptr_t elements = info->GetElementCount();
2270 intptr_t size = info->GetSizeInBytes();
2271 const char* name = elements != -1
2272 ? names_->GetFormatted(
2273 "%s / %" V8_PTR_PREFIX "d entries", info->GetLabel(), elements)
2274 : names_->GetCopy(info->GetLabel());
2275 return snapshot_->AddEntry(
2276 entries_type_,
2277 name,
2278 heap_object_map_->GenerateId(info),
2279 size != -1 ? static_cast<int>(size) : 0,
2280 0);
2281 }
2282
2283
2284 NativeObjectsExplorer::NativeObjectsExplorer(
2285 HeapSnapshot* snapshot,
2286 SnapshottingProgressReportingInterface* progress)
2287 : isolate_(snapshot->profiler()->heap_object_map()->heap()->isolate()),
2288 snapshot_(snapshot),
2289 names_(snapshot_->profiler()->names()),
2290 embedder_queried_(false),
2291 objects_by_info_(RetainedInfosMatch),
2292 native_groups_(StringsMatch),
2293 filler_(NULL) {
2294 synthetic_entries_allocator_ =
2295 new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
2296 native_entries_allocator_ =
2297 new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
2298 }
2299
2300
2301 NativeObjectsExplorer::~NativeObjectsExplorer() {
2302 for (HashMap::Entry* p = objects_by_info_.Start();
2303 p != NULL;
2304 p = objects_by_info_.Next(p)) {
2305 v8::RetainedObjectInfo* info =
2306 reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
2307 info->Dispose();
2308 List<HeapObject*>* objects =
2309 reinterpret_cast<List<HeapObject*>* >(p->value);
2310 delete objects;
2311 }
2312 for (HashMap::Entry* p = native_groups_.Start();
2313 p != NULL;
2314 p = native_groups_.Next(p)) {
2315 v8::RetainedObjectInfo* info =
2316 reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
2317 info->Dispose();
2318 }
2319 delete synthetic_entries_allocator_;
2320 delete native_entries_allocator_;
2321 }
2322
2323
2324 int NativeObjectsExplorer::EstimateObjectsCount() {
2325 FillRetainedObjects();
2326 return objects_by_info_.occupancy();
2327 }
2328
2329
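// Runs the mark-sweep GC prologue callbacks with
// kGCCallbackFlagConstructRetainedObjectInfos so the embedder can set up its
// RetainedObjectInfos, records the objects of every ObjectGroup under that
// info, and finally visits global handles that carry a wrapper class id.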
2330 void NativeObjectsExplorer::FillRetainedObjects() {
2331 if (embedder_queried_) return;
2332 Isolate* isolate = isolate_;
2333 const GCType major_gc_type = kGCTypeMarkSweepCompact;
2334 // Record objects that are joined into ObjectGroups.
2335 isolate->heap()->CallGCPrologueCallbacks(
2336 major_gc_type, kGCCallbackFlagConstructRetainedObjectInfos);
2337 List<ObjectGroup*>* groups = isolate->global_handles()->object_groups();
2338 for (int i = 0; i < groups->length(); ++i) {
2339 ObjectGroup* group = groups->at(i);
2340 if (group->info == NULL) continue;
2341 List<HeapObject*>* list = GetListMaybeDisposeInfo(group->info);
2342 for (size_t j = 0; j < group->length; ++j) {
2343 HeapObject* obj = HeapObject::cast(*group->objects[j]);
2344 list->Add(obj);
2345 in_groups_.Insert(obj);
2346 }
2347 group->info = NULL; // Acquire info object ownership.
2348 }
2349 isolate->global_handles()->RemoveObjectGroups();
2350 isolate->heap()->CallGCEpilogueCallbacks(major_gc_type, kNoGCCallbackFlags);
2351 // Record objects that are not in ObjectGroups but have a wrapper class id.
2352 GlobalHandlesExtractor extractor(this);
2353 isolate->global_handles()->IterateAllRootsWithClassIds(&extractor);
2354 embedder_queried_ = true;
2355 }
2356
2357
2358 void NativeObjectsExplorer::FillImplicitReferences() {
2359 Isolate* isolate = isolate_;
2360 List<ImplicitRefGroup*>* groups =
2361 isolate->global_handles()->implicit_ref_groups();
2362 for (int i = 0; i < groups->length(); ++i) {
2363 ImplicitRefGroup* group = groups->at(i);
2364 HeapObject* parent = *group->parent;
2365 int parent_entry =
2366 filler_->FindOrAddEntry(parent, native_entries_allocator_)->index();
2367 DCHECK(parent_entry != HeapEntry::kNoEntry);
2368 Object*** children = group->children;
2369 for (size_t j = 0; j < group->length; ++j) {
2370 Object* child = *children[j];
2371 HeapEntry* child_entry =
2372 filler_->FindOrAddEntry(child, native_entries_allocator_);
2373 filler_->SetNamedReference(
2374 HeapGraphEdge::kInternal,
2375 parent_entry,
2376 "native",
2377 child_entry);
2378 }
2379 }
2380 isolate->global_handles()->RemoveImplicitRefGroups();
2381 }
2382
2383 List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
2384 v8::RetainedObjectInfo* info) {
2385 HashMap::Entry* entry = objects_by_info_.LookupOrInsert(info, InfoHash(info));
2386 if (entry->value != NULL) {
2387 info->Dispose();
2388 } else {
2389 entry->value = new List<HeapObject*>(4);
2390 }
2391 return reinterpret_cast<List<HeapObject*>* >(entry->value);
2392 }
2393
2394
2395 bool NativeObjectsExplorer::IterateAndExtractReferences(
2396 SnapshotFiller* filler) {
2397 filler_ = filler;
2398 FillRetainedObjects();
2399 FillImplicitReferences();
2400 if (EstimateObjectsCount() > 0) {
2401 for (HashMap::Entry* p = objects_by_info_.Start();
2402 p != NULL;
2403 p = objects_by_info_.Next(p)) {
2404 v8::RetainedObjectInfo* info =
2405 reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
2406 SetNativeRootReference(info);
2407 List<HeapObject*>* objects =
2408 reinterpret_cast<List<HeapObject*>* >(p->value);
2409 for (int i = 0; i < objects->length(); ++i) {
2410 SetWrapperNativeReferences(objects->at(i), info);
2411 }
2412 }
2413 SetRootNativeRootsReference();
2414 }
2415 filler_ = NULL;
2416 return true;
2417 }
2418
2419
2420 class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo {
2421 public:
2422 explicit NativeGroupRetainedObjectInfo(const char* label)
2423 : disposed_(false),
2424 hash_(reinterpret_cast<intptr_t>(label)),
2425 label_(label) {
2426 }
2427
2428 virtual ~NativeGroupRetainedObjectInfo() {}
2429 virtual void Dispose() {
2430 CHECK(!disposed_);
2431 disposed_ = true;
2432 delete this;
2433 }
2434 virtual bool IsEquivalent(RetainedObjectInfo* other) {
2435 return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel());
2436 }
2437 virtual intptr_t GetHash() { return hash_; }
2438 virtual const char* GetLabel() { return label_; }
2439
2440 private:
2441 bool disposed_;
2442 intptr_t hash_;
2443 const char* label_;
2444 };
2445
2446
2447 NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
2448 const char* label) {
2449 const char* label_copy = names_->GetCopy(label);
2450 uint32_t hash = StringHasher::HashSequentialString(
2451 label_copy,
2452 static_cast<int>(strlen(label_copy)),
2453 isolate_->heap()->HashSeed());
2454 HashMap::Entry* entry =
2455 native_groups_.LookupOrInsert(const_cast<char*>(label_copy), hash);
2456 if (entry->value == NULL) {
2457 entry->value = new NativeGroupRetainedObjectInfo(label);
2458 }
2459 return static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2460 }
2461
2462
2463 void NativeObjectsExplorer::SetNativeRootReference(
2464 v8::RetainedObjectInfo* info) {
2465 HeapEntry* child_entry =
2466 filler_->FindOrAddEntry(info, native_entries_allocator_);
2467 DCHECK(child_entry != NULL);
2468 NativeGroupRetainedObjectInfo* group_info =
2469 FindOrAddGroupInfo(info->GetGroupLabel());
2470 HeapEntry* group_entry =
2471 filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
2472 // |FindOrAddEntry| can move and resize the entries backing store. Reload
2473 // potentially-stale pointer.
2474 child_entry = filler_->FindEntry(info);
2475 filler_->SetNamedAutoIndexReference(
2476 HeapGraphEdge::kInternal,
2477 group_entry->index(),
2478 child_entry);
2479 }
2480
2481
2482 void NativeObjectsExplorer::SetWrapperNativeReferences(
2483 HeapObject* wrapper, v8::RetainedObjectInfo* info) {
2484 HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
2485 DCHECK(wrapper_entry != NULL);
2486 HeapEntry* info_entry =
2487 filler_->FindOrAddEntry(info, native_entries_allocator_);
2488 DCHECK(info_entry != NULL);
2489 filler_->SetNamedReference(HeapGraphEdge::kInternal,
2490 wrapper_entry->index(),
2491 "native",
2492 info_entry);
2493 filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
2494 info_entry->index(),
2495 wrapper_entry);
2496 }
2497
2498
2499 void NativeObjectsExplorer::SetRootNativeRootsReference() {
2500 for (HashMap::Entry* entry = native_groups_.Start();
2501 entry;
2502 entry = native_groups_.Next(entry)) {
2503 NativeGroupRetainedObjectInfo* group_info =
2504 static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2505 HeapEntry* group_entry =
2506 filler_->FindOrAddEntry(group_info, native_entries_allocator_);
2507 DCHECK(group_entry != NULL);
2508 filler_->SetIndexedAutoIndexReference(
2509 HeapGraphEdge::kElement,
2510 snapshot_->root()->index(),
2511 group_entry);
2512 }
2513 }
2514
2515
2516 void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) {
2517 if (in_groups_.Contains(*p)) return;
2518 Isolate* isolate = isolate_;
2519 v8::RetainedObjectInfo* info =
2520 isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
2521 if (info == NULL) return;
2522 GetListMaybeDisposeInfo(info)->Add(HeapObject::cast(*p));
2523 }
2524
2525
2526 HeapSnapshotGenerator::HeapSnapshotGenerator(
2527 HeapSnapshot* snapshot,
2528 v8::ActivityControl* control,
2529 v8::HeapProfiler::ObjectNameResolver* resolver,
2530 Heap* heap)
2531 : snapshot_(snapshot),
2532 control_(control),
2533 v8_heap_explorer_(snapshot_, this, resolver),
2534 dom_explorer_(snapshot_, this),
2535 heap_(heap) {
2536 }
2537
2538
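// Snapshot generation: tag the global objects, force two full GCs (see the
// TODO below), add the synthetic root entries, fill references from both the
// V8 heap and the embedder, then fill in children and remember the last JS
// object id.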
2539 bool HeapSnapshotGenerator::GenerateSnapshot() {
2540 v8_heap_explorer_.TagGlobalObjects();
2541
2542 // TODO(1562) Profiler assumes that any object that is in the heap after
2543 // full GC is reachable from the root when computing dominators.
2544 // This is not true for weakly reachable objects.
2545 // As a temporary solution we call GC twice.
2546 heap_->CollectAllGarbage(
2547 Heap::kMakeHeapIterableMask,
2548 "HeapSnapshotGenerator::GenerateSnapshot");
2549 heap_->CollectAllGarbage(
2550 Heap::kMakeHeapIterableMask,
2551 "HeapSnapshotGenerator::GenerateSnapshot");
2552
2553 #ifdef VERIFY_HEAP
2554 Heap* debug_heap = heap_;
2555 if (FLAG_verify_heap) {
2556 debug_heap->Verify();
2557 }
2558 #endif
2559
2560 SetProgressTotal(2); // 2 passes.
2561
2562 #ifdef VERIFY_HEAP
2563 if (FLAG_verify_heap) {
2564 debug_heap->Verify();
2565 }
2566 #endif
2567
2568 snapshot_->AddSyntheticRootEntries();
2569
2570 if (!FillReferences()) return false;
2571
2572 snapshot_->FillChildren();
2573 snapshot_->RememberLastJSObjectId();
2574
2575 progress_counter_ = progress_total_;
2576 if (!ProgressReport(true)) return false;
2577 return true;
2578 }
2579
2580
2581 void HeapSnapshotGenerator::ProgressStep() {
2582 ++progress_counter_;
2583 }
2584
2585
2586 bool HeapSnapshotGenerator::ProgressReport(bool force) {
2587 const int kProgressReportGranularity = 10000;
2588 if (control_ != NULL
2589 && (force || progress_counter_ % kProgressReportGranularity == 0)) {
2590 return
2591 control_->ReportProgressValue(progress_counter_, progress_total_) ==
2592 v8::ActivityControl::kContinue;
2593 }
2594 return true;
2595 }
2596
2597
2598 void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
2599 if (control_ == NULL) return;
2600 HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
2601 progress_total_ = iterations_count * (
2602 v8_heap_explorer_.EstimateObjectsCount(&iterator) +
2603 dom_explorer_.EstimateObjectsCount());
2604 progress_counter_ = 0;
2605 }
2606
2607
2608 bool HeapSnapshotGenerator::FillReferences() {
2609 SnapshotFiller filler(snapshot_, &entries_);
2610 return v8_heap_explorer_.IterateAndExtractReferences(&filler)
2611 && dom_explorer_.IterateAndExtractReferences(&filler);
2612 }
2613
2614
2615 template<int bytes> struct MaxDecimalDigitsIn;
2616 template<> struct MaxDecimalDigitsIn<4> {
2617 static const int kSigned = 11;
2618 static const int kUnsigned = 10;
2619 };
2620 template<> struct MaxDecimalDigitsIn<8> {
2621 static const int kSigned = 20;
2622 static const int kUnsigned = 20;
2623 };
2624
2625
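// Buffers serializer output into chunks of stream->GetChunkSize() characters
// and flushes a chunk whenever it fills up. If the embedder's WriteAsciiChunk
// returns kAbort, the writer is marked aborted and no further chunks are
// written.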
2626 class OutputStreamWriter {
2627 public:
2628 explicit OutputStreamWriter(v8::OutputStream* stream)
2629 : stream_(stream),
2630 chunk_size_(stream->GetChunkSize()),
2631 chunk_(chunk_size_),
2632 chunk_pos_(0),
2633 aborted_(false) {
2634 DCHECK(chunk_size_ > 0);
2635 }
2636 bool aborted() { return aborted_; }
2637 void AddCharacter(char c) {
2638 DCHECK(c != '\0');
2639 DCHECK(chunk_pos_ < chunk_size_);
2640 chunk_[chunk_pos_++] = c;
2641 MaybeWriteChunk();
2642 }
2643 void AddString(const char* s) {
2644 AddSubstring(s, StrLength(s));
2645 }
2646 void AddSubstring(const char* s, int n) {
2647 if (n <= 0) return;
2648 DCHECK(static_cast<size_t>(n) <= strlen(s));
2649 const char* s_end = s + n;
2650 while (s < s_end) {
2651 int s_chunk_size =
2652 Min(chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
2653 DCHECK(s_chunk_size > 0);
2654 MemCopy(chunk_.start() + chunk_pos_, s, s_chunk_size);
2655 s += s_chunk_size;
2656 chunk_pos_ += s_chunk_size;
2657 MaybeWriteChunk();
2658 }
2659 }
2660 void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
2661 void Finalize() {
2662 if (aborted_) return;
2663 DCHECK(chunk_pos_ < chunk_size_);
2664 if (chunk_pos_ != 0) {
2665 WriteChunk();
2666 }
2667 stream_->EndOfStream();
2668 }
2669
2670 private:
2671 template<typename T>
2672 void AddNumberImpl(T n, const char* format) {
2673 // Buffer for the longest value plus trailing \0
2674 static const int kMaxNumberSize =
2675 MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1;
2676 if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
2677 int result = SNPrintF(
2678 chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
2679 DCHECK(result != -1);
2680 chunk_pos_ += result;
2681 MaybeWriteChunk();
2682 } else {
2683 EmbeddedVector<char, kMaxNumberSize> buffer;
2684 int result = SNPrintF(buffer, format, n);
2685 USE(result);
2686 DCHECK(result != -1);
2687 AddString(buffer.start());
2688 }
2689 }
2690 void MaybeWriteChunk() {
2691 DCHECK(chunk_pos_ <= chunk_size_);
2692 if (chunk_pos_ == chunk_size_) {
2693 WriteChunk();
2694 }
2695 }
2696 void WriteChunk() {
2697 if (aborted_) return;
2698 if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
2699 v8::OutputStream::kAbort) aborted_ = true;
2700 chunk_pos_ = 0;
2701 }
2702
2703 v8::OutputStream* stream_;
2704 int chunk_size_;
2705 ScopedVector<char> chunk_;
2706 int chunk_pos_;
2707 bool aborted_;
2708 };
2709
2710
2711 // type, name|index, to_node.
2712 const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
2713 // type, name, id, self_size, edge_count, trace_node_id.
2714 const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 6;
2715
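// The snapshot is written as a single JSON object:
//   {"snapshot":{...meta and counts...},"nodes":[...],"edges":[...],
//    "trace_function_infos":[...],"trace_tree":[...],"samples":[...],
//    "strings":[...]}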
2716 void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
2717 if (AllocationTracker* allocation_tracker =
2718 snapshot_->profiler()->allocation_tracker()) {
2719 allocation_tracker->PrepareForSerialization();
2720 }
2721 DCHECK(writer_ == NULL);
2722 writer_ = new OutputStreamWriter(stream);
2723 SerializeImpl();
2724 delete writer_;
2725 writer_ = NULL;
2726 }
2727
2728
2729 void HeapSnapshotJSONSerializer::SerializeImpl() {
2730 DCHECK(0 == snapshot_->root()->index());
2731 writer_->AddCharacter('{');
2732 writer_->AddString("\"snapshot\":{");
2733 SerializeSnapshot();
2734 if (writer_->aborted()) return;
2735 writer_->AddString("},\n");
2736 writer_->AddString("\"nodes\":[");
2737 SerializeNodes();
2738 if (writer_->aborted()) return;
2739 writer_->AddString("],\n");
2740 writer_->AddString("\"edges\":[");
2741 SerializeEdges();
2742 if (writer_->aborted()) return;
2743 writer_->AddString("],\n");
2744
2745 writer_->AddString("\"trace_function_infos\":[");
2746 SerializeTraceNodeInfos();
2747 if (writer_->aborted()) return;
2748 writer_->AddString("],\n");
2749 writer_->AddString("\"trace_tree\":[");
2750 SerializeTraceTree();
2751 if (writer_->aborted()) return;
2752 writer_->AddString("],\n");
2753
2754 writer_->AddString("\"samples\":[");
2755 SerializeSamples();
2756 if (writer_->aborted()) return;
2757 writer_->AddString("],\n");
2758
2759 writer_->AddString("\"strings\":[");
2760 SerializeStrings();
2761 if (writer_->aborted()) return;
2762 writer_->AddCharacter(']');
2763 writer_->AddCharacter('}');
2764 writer_->Finalize();
2765 }
2766
2767
2768 int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
2769 HashMap::Entry* cache_entry =
2770 strings_.LookupOrInsert(const_cast<char*>(s), StringHash(s));
2771 if (cache_entry->value == NULL) {
2772 cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
2773 }
2774 return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
2775 }
2776
2777
2778 namespace {
2779
2780 template<size_t size> struct ToUnsigned;
2781
2782 template<> struct ToUnsigned<4> {
2783 typedef uint32_t Type;
2784 };
2785
2786 template<> struct ToUnsigned<8> {
2787 typedef uint64_t Type;
2788 };
2789
2790 } // namespace
2791
2792
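// Writes the decimal representation of an unsigned value into the buffer:
// the first loop counts the digits, the second writes them back to front.
// Returns the position just past the last digit; no terminating '\0' is
// added. E.g. utoa_impl(42u, buffer, 0) stores '4', '2' and returns 2.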
2793 template<typename T>
2794 static int utoa_impl(T value, const Vector<char>& buffer, int buffer_pos) {
2795 STATIC_ASSERT(static_cast<T>(-1) > 0); // Check that T is unsigned
2796 int number_of_digits = 0;
2797 T t = value;
2798 do {
2799 ++number_of_digits;
2800 } while (t /= 10);
2801
2802 buffer_pos += number_of_digits;
2803 int result = buffer_pos;
2804 do {
2805 int last_digit = static_cast<int>(value % 10);
2806 buffer[--buffer_pos] = '0' + last_digit;
2807 value /= 10;
2808 } while (value);
2809 return result;
2810 }
2811
2812
2813 template<typename T>
2814 static int utoa(T value, const Vector<char>& buffer, int buffer_pos) {
2815 typename ToUnsigned<sizeof(value)>::Type unsigned_value = value;
2816 STATIC_ASSERT(sizeof(value) == sizeof(unsigned_value));
2817 return utoa_impl(unsigned_value, buffer, buffer_pos);
2818 }
2819
2820
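// Each edge is emitted as the triple "type,name_or_index,to_node", matching
// kEdgeFieldsCount and the "edge_fields" meta description below. Element and
// hidden edges store a numeric index; all other edge types store a string
// table id obtained from GetStringId().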
2821 void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
2822 bool first_edge) {
2823 // The buffer needs space for 3 unsigned ints, 3 commas, \n and \0
2824 static const int kBufferSize =
2825 MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2; // NOLINT
2826 EmbeddedVector<char, kBufferSize> buffer;
2827 int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
2828 || edge->type() == HeapGraphEdge::kHidden
2829 ? edge->index() : GetStringId(edge->name());
2830 int buffer_pos = 0;
2831 if (!first_edge) {
2832 buffer[buffer_pos++] = ',';
2833 }
2834 buffer_pos = utoa(edge->type(), buffer, buffer_pos);
2835 buffer[buffer_pos++] = ',';
2836 buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
2837 buffer[buffer_pos++] = ',';
2838 buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos);
2839 buffer[buffer_pos++] = '\n';
2840 buffer[buffer_pos++] = '\0';
2841 writer_->AddString(buffer.start());
2842 }
2843
2844
2845 void HeapSnapshotJSONSerializer::SerializeEdges() {
2846 List<HeapGraphEdge*>& edges = snapshot_->children();
2847 for (int i = 0; i < edges.length(); ++i) {
2848 DCHECK(i == 0 ||
2849 edges[i - 1]->from()->index() <= edges[i]->from()->index());
2850 SerializeEdge(edges[i], i == 0);
2851 if (writer_->aborted()) return;
2852 }
2853 }
2854
2855
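// Each node is emitted as "type,name,id,self_size,edge_count,trace_node_id",
// matching kNodeFieldsCount and the "node_fields" meta description below.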
2856 void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
2857 // The buffer needs space for 5 unsigned ints, 1 size_t, 6 commas, \n and \0
2858 static const int kBufferSize =
2859 5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
2860 + MaxDecimalDigitsIn<sizeof(size_t)>::kUnsigned // NOLINT
2861 + 6 + 1 + 1;
2862 EmbeddedVector<char, kBufferSize> buffer;
2863 int buffer_pos = 0;
2864 if (entry_index(entry) != 0) {
2865 buffer[buffer_pos++] = ',';
2866 }
2867 buffer_pos = utoa(entry->type(), buffer, buffer_pos);
2868 buffer[buffer_pos++] = ',';
2869 buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
2870 buffer[buffer_pos++] = ',';
2871 buffer_pos = utoa(entry->id(), buffer, buffer_pos);
2872 buffer[buffer_pos++] = ',';
2873 buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
2874 buffer[buffer_pos++] = ',';
2875 buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
2876 buffer[buffer_pos++] = ',';
2877 buffer_pos = utoa(entry->trace_node_id(), buffer, buffer_pos);
2878 buffer[buffer_pos++] = '\n';
2879 buffer[buffer_pos++] = '\0';
2880 writer_->AddString(buffer.start());
2881 }
2882
2883
2884 void HeapSnapshotJSONSerializer::SerializeNodes() {
2885 List<HeapEntry>& entries = snapshot_->entries();
2886 for (int i = 0; i < entries.length(); ++i) {
2887 SerializeNode(&entries[i]);
2888 if (writer_->aborted()) return;
2889 }
2890 }
2891
2892
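// Emits the "meta" object that describes the node, edge, trace and sample
// field layouts, followed by the node_count, edge_count and
// trace_function_count totals.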
2893 void HeapSnapshotJSONSerializer::SerializeSnapshot() {
2894 writer_->AddString("\"meta\":");
2895 // The object describing node serialization layout.
2896 // We use a set of macros to improve readability.
2897 #define JSON_A(s) "[" s "]"
2898 #define JSON_O(s) "{" s "}"
2899 #define JSON_S(s) "\"" s "\""
2900 writer_->AddString(JSON_O(
2901 JSON_S("node_fields") ":" JSON_A(
2902 JSON_S("type") ","
2903 JSON_S("name") ","
2904 JSON_S("id") ","
2905 JSON_S("self_size") ","
2906 JSON_S("edge_count") ","
2907 JSON_S("trace_node_id")) ","
2908 JSON_S("node_types") ":" JSON_A(
2909 JSON_A(
2910 JSON_S("hidden") ","
2911 JSON_S("array") ","
2912 JSON_S("string") ","
2913 JSON_S("object") ","
2914 JSON_S("code") ","
2915 JSON_S("closure") ","
2916 JSON_S("regexp") ","
2917 JSON_S("number") ","
2918 JSON_S("native") ","
2919 JSON_S("synthetic") ","
2920 JSON_S("concatenated string") ","
2921 JSON_S("sliced string")) ","
2922 JSON_S("string") ","
2923 JSON_S("number") ","
2924 JSON_S("number") ","
2925 JSON_S("number") ","
2926 JSON_S("number") ","
2927 JSON_S("number")) ","
2928 JSON_S("edge_fields") ":" JSON_A(
2929 JSON_S("type") ","
2930 JSON_S("name_or_index") ","
2931 JSON_S("to_node")) ","
2932 JSON_S("edge_types") ":" JSON_A(
2933 JSON_A(
2934 JSON_S("context") ","
2935 JSON_S("element") ","
2936 JSON_S("property") ","
2937 JSON_S("internal") ","
2938 JSON_S("hidden") ","
2939 JSON_S("shortcut") ","
2940 JSON_S("weak")) ","
2941 JSON_S("string_or_number") ","
2942 JSON_S("node")) ","
2943 JSON_S("trace_function_info_fields") ":" JSON_A(
2944 JSON_S("function_id") ","
2945 JSON_S("name") ","
2946 JSON_S("script_name") ","
2947 JSON_S("script_id") ","
2948 JSON_S("line") ","
2949 JSON_S("column")) ","
2950 JSON_S("trace_node_fields") ":" JSON_A(
2951 JSON_S("id") ","
2952 JSON_S("function_info_index") ","
2953 JSON_S("count") ","
2954 JSON_S("size") ","
2955 JSON_S("children")) ","
2956 JSON_S("sample_fields") ":" JSON_A(
2957 JSON_S("timestamp_us") ","
2958 JSON_S("last_assigned_id"))));
2959 #undef JSON_S
2960 #undef JSON_O
2961 #undef JSON_A
2962 writer_->AddString(",\"node_count\":");
2963 writer_->AddNumber(snapshot_->entries().length());
2964 writer_->AddString(",\"edge_count\":");
2965 writer_->AddNumber(snapshot_->edges().length());
2966 writer_->AddString(",\"trace_function_count\":");
2967 uint32_t count = 0;
2968 AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2969 if (tracker) {
2970 count = tracker->function_info_list().length();
2971 }
2972 writer_->AddNumber(count);
2973 }
2974
2975
2976 static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
2977 static const char hex_chars[] = "0123456789ABCDEF";
2978 w->AddString("\\u");
2979 w->AddCharacter(hex_chars[(u >> 12) & 0xf]);
2980 w->AddCharacter(hex_chars[(u >> 8) & 0xf]);
2981 w->AddCharacter(hex_chars[(u >> 4) & 0xf]);
2982 w->AddCharacter(hex_chars[u & 0xf]);
2983 }
2984
2985
2986 void HeapSnapshotJSONSerializer::SerializeTraceTree() {
2987 AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2988 if (!tracker) return;
2989 AllocationTraceTree* traces = tracker->trace_tree();
2990 SerializeTraceNode(traces->root());
2991 }
2992
2993
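// Each allocation trace node is written as
// "id,function_info_index,count,size,[child,child,...]", recursing into its
// children.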
2994 void HeapSnapshotJSONSerializer::SerializeTraceNode(AllocationTraceNode* node) {
2995 // The buffer needs space for 4 unsigned ints, 4 commas, [ and \0
2996 const int kBufferSize =
2997 4 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
2998 + 4 + 1 + 1;
2999 EmbeddedVector<char, kBufferSize> buffer;
3000 int buffer_pos = 0;
3001 buffer_pos = utoa(node->id(), buffer, buffer_pos);
3002 buffer[buffer_pos++] = ',';
3003 buffer_pos = utoa(node->function_info_index(), buffer, buffer_pos);
3004 buffer[buffer_pos++] = ',';
3005 buffer_pos = utoa(node->allocation_count(), buffer, buffer_pos);
3006 buffer[buffer_pos++] = ',';
3007 buffer_pos = utoa(node->allocation_size(), buffer, buffer_pos);
3008 buffer[buffer_pos++] = ',';
3009 buffer[buffer_pos++] = '[';
3010 buffer[buffer_pos++] = '\0';
3011 writer_->AddString(buffer.start());
3012
3013 Vector<AllocationTraceNode*> children = node->children();
3014 for (int i = 0; i < children.length(); i++) {
3015 if (i > 0) {
3016 writer_->AddCharacter(',');
3017 }
3018 SerializeTraceNode(children[i]);
3019 }
3020 writer_->AddCharacter(']');
3021 }
3022
3023
3024 // 0-based positions are converted to 1-based during serialization.
3025 static int SerializePosition(int position, const Vector<char>& buffer,
3026 int buffer_pos) {
3027 if (position == -1) {
3028 buffer[buffer_pos++] = '0';
3029 } else {
3030 DCHECK(position >= 0);
3031 buffer_pos = utoa(static_cast<unsigned>(position + 1), buffer, buffer_pos);
3032 }
3033 return buffer_pos;
3034 }
3035
3036
3037 void HeapSnapshotJSONSerializer::SerializeTraceNodeInfos() {
3038 AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
3039 if (!tracker) return;
3040 // The buffer needs space for 6 unsigned ints, 6 commas, \n and \0
3041 const int kBufferSize =
3042 6 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
3043 + 6 + 1 + 1;
3044 EmbeddedVector<char, kBufferSize> buffer;
3045 const List<AllocationTracker::FunctionInfo*>& list =
3046 tracker->function_info_list();
3047 for (int i = 0; i < list.length(); i++) {
3048 AllocationTracker::FunctionInfo* info = list[i];
3049 int buffer_pos = 0;
3050 if (i > 0) {
3051 buffer[buffer_pos++] = ',';
3052 }
3053 buffer_pos = utoa(info->function_id, buffer, buffer_pos);
3054 buffer[buffer_pos++] = ',';
3055 buffer_pos = utoa(GetStringId(info->name), buffer, buffer_pos);
3056 buffer[buffer_pos++] = ',';
3057 buffer_pos = utoa(GetStringId(info->script_name), buffer, buffer_pos);
3058 buffer[buffer_pos++] = ',';
3059 // The cast is safe because script id is a non-negative Smi.
3060 buffer_pos = utoa(static_cast<unsigned>(info->script_id), buffer,
3061 buffer_pos);
3062 buffer[buffer_pos++] = ',';
3063 buffer_pos = SerializePosition(info->line, buffer, buffer_pos);
3064 buffer[buffer_pos++] = ',';
3065 buffer_pos = SerializePosition(info->column, buffer, buffer_pos);
3066 buffer[buffer_pos++] = '\n';
3067 buffer[buffer_pos++] = '\0';
3068 writer_->AddString(buffer.start());
3069 }
3070 }
3071
3072
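// Writes one "timestamp_us,last_assigned_id" pair per heap sample; timestamps
// are reported relative to the first sample.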
void HeapSnapshotJSONSerializer::SerializeSamples() {
  const List<HeapObjectsMap::TimeInterval>& samples =
      snapshot_->profiler()->heap_object_map()->samples();
  if (samples.is_empty()) return;
  base::TimeTicks start_time = samples[0].timestamp;
  // The buffer needs space for 2 unsigned ints, 2 commas, \n and \0
  const int kBufferSize = MaxDecimalDigitsIn<sizeof(
                              base::TimeDelta().InMicroseconds())>::kUnsigned +
                          MaxDecimalDigitsIn<sizeof(samples[0].id)>::kUnsigned +
                          2 + 1 + 1;
  EmbeddedVector<char, kBufferSize> buffer;
  for (int i = 0; i < samples.length(); i++) {
    HeapObjectsMap::TimeInterval& sample = samples[i];
    int buffer_pos = 0;
    if (i > 0) {
      buffer[buffer_pos++] = ',';
    }
    base::TimeDelta time_delta = sample.timestamp - start_time;
    buffer_pos = utoa(time_delta.InMicroseconds(), buffer, buffer_pos);
    buffer[buffer_pos++] = ',';
    buffer_pos = utoa(sample.last_assigned_id(), buffer, buffer_pos);
    buffer[buffer_pos++] = '\n';
    buffer[buffer_pos++] = '\0';
    writer_->AddString(buffer.start());
  }
}


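// Writes the string on its own line as a JSON string literal: known control
// characters use their escape sequences, other non-printable bytes and
// non-ASCII UTF-8 sequences are emitted as \uXXXX escapes.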
void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
  writer_->AddCharacter('\n');
  writer_->AddCharacter('\"');
  for ( ; *s != '\0'; ++s) {
    switch (*s) {
      case '\b':
        writer_->AddString("\\b");
        continue;
      case '\f':
        writer_->AddString("\\f");
        continue;
      case '\n':
        writer_->AddString("\\n");
        continue;
      case '\r':
        writer_->AddString("\\r");
        continue;
      case '\t':
        writer_->AddString("\\t");
        continue;
      case '\"':
      case '\\':
        writer_->AddCharacter('\\');
        writer_->AddCharacter(*s);
        continue;
      default:
        if (*s > 31 && *s < 128) {
          writer_->AddCharacter(*s);
        } else if (*s <= 31) {
          // Special character with no dedicated literal.
          WriteUChar(writer_, *s);
        } else {
          // Convert UTF-8 into \u UTF-16 literal.
          size_t length = 1, cursor = 0;
          for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
          unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
          if (c != unibrow::Utf8::kBadChar) {
            WriteUChar(writer_, c);
            DCHECK(cursor != 0);
            s += cursor - 1;
          } else {
            writer_->AddCharacter('?');
          }
        }
    }
  }
  writer_->AddCharacter('\"');
}


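// Writes the string table in string-id order; index 0 is reserved for a
// dummy entry so that real string ids map directly to array positions.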
void HeapSnapshotJSONSerializer::SerializeStrings() {
  ScopedVector<const unsigned char*> sorted_strings(
      strings_.occupancy() + 1);
  for (HashMap::Entry* entry = strings_.Start();
       entry != NULL;
       entry = strings_.Next(entry)) {
    int index = static_cast<int>(reinterpret_cast<uintptr_t>(entry->value));
    sorted_strings[index] = reinterpret_cast<const unsigned char*>(entry->key);
  }
  writer_->AddString("\"<dummy>\"");
  for (int i = 1; i < sorted_strings.length(); ++i) {
    writer_->AddCharacter(',');
    SerializeString(sorted_strings[i]);
    if (writer_->aborted()) return;
  }
}


}  // namespace internal
}  // namespace v8