1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/v8.h"
6 
7 #include "src/profile-generator-inl.h"
8 
9 #include "src/compiler.h"
10 #include "src/debug.h"
11 #include "src/global-handles.h"
12 #include "src/sampler.h"
13 #include "src/scopeinfo.h"
14 #include "src/unicode.h"
15 #include "src/zone-inl.h"
16 
17 namespace v8 {
18 namespace internal {
19 
20 
StringsMatch(void * key1,void * key2)21 bool StringsStorage::StringsMatch(void* key1, void* key2) {
22   return strcmp(reinterpret_cast<char*>(key1),
23                 reinterpret_cast<char*>(key2)) == 0;
24 }
25 
26 
// Seeds string hashing with the heap's hash seed and installs the
// string-equality callback for the backing HashMap.
StringsStorage::StringsStorage(Heap* heap)
    : hash_seed_(heap->HashSeed()), names_(StringsMatch) {
}
30 
31 
~StringsStorage()32 StringsStorage::~StringsStorage() {
33   for (HashMap::Entry* p = names_.Start();
34        p != NULL;
35        p = names_.Next(p)) {
36     DeleteArray(reinterpret_cast<const char*>(p->value));
37   }
38 }
39 
40 
// Returns an interned copy of |src|, allocating and storing a new
// NUL-terminated copy only the first time the string is seen.
const char* StringsStorage::GetCopy(const char* src) {
  int len = static_cast<int>(strlen(src));
  HashMap::Entry* entry = GetEntry(src, len);
  if (entry->value == NULL) {
    // First occurrence: the entry key must point at storage that outlives
    // the lookup key |src|, so make an owned copy.
    Vector<char> dst = Vector<char>::New(len + 1);
    StrNCpy(dst, src, len);
    dst[len] = '\0';
    entry->key = dst.start();
    entry->value = entry->key;
  }
  return reinterpret_cast<const char*>(entry->value);
}
53 
54 
// printf-style convenience wrapper: formats the arguments and returns an
// interned copy of the result (see GetVFormatted).
const char* StringsStorage::GetFormatted(const char* format, ...) {
  va_list args;
  va_start(args, format);
  const char* result = GetVFormatted(format, args);
  va_end(args);
  return result;
}
62 
63 
AddOrDisposeString(char * str,int len)64 const char* StringsStorage::AddOrDisposeString(char* str, int len) {
65   HashMap::Entry* entry = GetEntry(str, len);
66   if (entry->value == NULL) {
67     // New entry added.
68     entry->key = str;
69     entry->value = str;
70   } else {
71     DeleteArray(str);
72   }
73   return reinterpret_cast<const char*>(entry->value);
74 }
75 
76 
// Formats into a fixed 1KB buffer and interns the result. If VSNPrintF
// fails (returns -1, e.g. on truncation), the buffer is discarded and the
// raw format string is interned instead as a best-effort fallback.
const char* StringsStorage::GetVFormatted(const char* format, va_list args) {
  Vector<char> str = Vector<char>::New(1024);
  int len = VSNPrintF(str, format, args);
  if (len == -1) {
    DeleteArray(str.start());
    return GetCopy(format);
  }
  return AddOrDisposeString(str.start(), len);
}
86 
87 
// Returns an interned, flattened representation of |name|. Strings are
// truncated to kMaxNameSize characters; symbols map to "<symbol>" and
// anything else to the empty string.
const char* StringsStorage::GetName(Name* name) {
  if (name->IsString()) {
    String* str = String::cast(name);
    int length = Min(kMaxNameSize, str->length());
    int actual_length = 0;
    SmartArrayPointer<char> data =
        str->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL, 0, length,
                       &actual_length);
    // Detach() transfers ownership of the buffer to the storage.
    return AddOrDisposeString(data.Detach(), actual_length);
  } else if (name->IsSymbol()) {
    return "<symbol>";
  }
  return "";
}
102 
103 
// Returns an interned decimal string representation of |index|.
const char* StringsStorage::GetName(int index) {
  return GetFormatted("%d", index);
}
107 
108 
// Function-name variant for heap Name objects; currently identical to
// GetName(Name*).
const char* StringsStorage::GetFunctionName(Name* name) {
  return GetName(name);
}
112 
113 
// Function-name variant for C strings; interns a copy of |name|.
const char* StringsStorage::GetFunctionName(const char* name) {
  return GetCopy(name);
}
117 
118 
GetUsedMemorySize() const119 size_t StringsStorage::GetUsedMemorySize() const {
120   size_t size = sizeof(*this);
121   size += sizeof(HashMap::Entry) * names_.capacity();
122   for (HashMap::Entry* p = names_.Start(); p != NULL; p = names_.Next(p)) {
123     size += strlen(reinterpret_cast<const char*>(p->value)) + 1;
124   }
125   return size;
126 }
127 
128 
// Looks up (inserting if absent) the HashMap entry for the given string,
// hashing its characters with the heap-provided seed.
HashMap::Entry* StringsStorage::GetEntry(const char* str, int len) {
  uint32_t hash = StringHasher::HashSequentialString(str, len, hash_seed_);
  return names_.Lookup(const_cast<char*>(str), hash, true);
}
133 
134 
// Shared empty-string defaults for optional CodeEntry fields.
const char* const CodeEntry::kEmptyNamePrefix = "";
const char* const CodeEntry::kEmptyResourceName = "";
const char* const CodeEntry::kEmptyBailoutReason = "";
138 
139 
// no_frame_ranges_ is a heap-allocated list (possibly NULL) owned by the
// entry; deleting NULL is a no-op.
CodeEntry::~CodeEntry() {
  delete no_frame_ranges_;
}
143 
144 
// Computes a hash consistent with IsSameAs(): entries that compare equal
// hash equally. Shared-function entries (shared_id_ != 0) hash the id
// alone; other entries fold in the identity pointers and line number
// (pointer comparison suffices because name strings are presumably
// interned via StringsStorage — consistent with IsSameAs below).
uint32_t CodeEntry::GetCallUid() const {
  uint32_t hash = ComputeIntegerHash(tag_, v8::internal::kZeroHashSeed);
  if (shared_id_ != 0) {
    hash ^= ComputeIntegerHash(static_cast<uint32_t>(shared_id_),
                               v8::internal::kZeroHashSeed);
  } else {
    hash ^= ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name_prefix_)),
        v8::internal::kZeroHashSeed);
    hash ^= ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name_)),
        v8::internal::kZeroHashSeed);
    hash ^= ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(resource_name_)),
        v8::internal::kZeroHashSeed);
    hash ^= ComputeIntegerHash(line_number_, v8::internal::kZeroHashSeed);
  }
  return hash;
}
164 
165 
IsSameAs(CodeEntry * entry) const166 bool CodeEntry::IsSameAs(CodeEntry* entry) const {
167   return this == entry
168       || (tag_ == entry->tag_
169           && shared_id_ == entry->shared_id_
170           && (shared_id_ != 0
171               || (name_prefix_ == entry->name_prefix_
172                   && name_ == entry->name_
173                   && resource_name_ == entry->resource_name_
174                   && line_number_ == entry->line_number_)));
175 }
176 
177 
// Re-tags this entry as a builtin and records which builtin it is.
void CodeEntry::SetBuiltinId(Builtins::Name id) {
  tag_ = Logger::BUILTIN_TAG;
  builtin_id_ = id;
}
182 
183 
FindChild(CodeEntry * entry)184 ProfileNode* ProfileNode::FindChild(CodeEntry* entry) {
185   HashMap::Entry* map_entry =
186       children_.Lookup(entry, CodeEntryHash(entry), false);
187   return map_entry != NULL ?
188       reinterpret_cast<ProfileNode*>(map_entry->value) : NULL;
189 }
190 
191 
FindOrAddChild(CodeEntry * entry)192 ProfileNode* ProfileNode::FindOrAddChild(CodeEntry* entry) {
193   HashMap::Entry* map_entry =
194       children_.Lookup(entry, CodeEntryHash(entry), true);
195   if (map_entry->value == NULL) {
196     // New node added.
197     ProfileNode* new_node = new ProfileNode(tree_, entry);
198     map_entry->value = new_node;
199     children_list_.Add(new_node);
200   }
201   return reinterpret_cast<ProfileNode*>(map_entry->value);
202 }
203 
204 
// Debug dump of this node and (recursively) its subtree to stdout.
// Line format: ticks, indentation, name prefix + name, script id, node id,
// bailout reason, and optionally "resource:line".
void ProfileNode::Print(int indent) {
  base::OS::Print("%5u %*s %s%s %d #%d %s", self_ticks_, indent, "",
                  entry_->name_prefix(), entry_->name(), entry_->script_id(),
                  id(), entry_->bailout_reason());
  if (entry_->resource_name()[0] != '\0')
    base::OS::Print(" %s:%d", entry_->resource_name(), entry_->line_number());
  base::OS::Print("\n");
  // Children are printed with two extra columns of indentation.
  for (HashMap::Entry* p = children_.Start();
       p != NULL;
       p = children_.Next(p)) {
    reinterpret_cast<ProfileNode*>(p->value)->Print(indent + 2);
  }
}
218 
219 
// Traversal callback used by ~ProfileTree: deletes each node only after
// all of its children have been visited (post-order), so children are
// freed before their parents.
class DeleteNodesCallback {
 public:
  void BeforeTraversingChild(ProfileNode*, ProfileNode*) { }

  // Post-order hook: the node's children are already deleted here.
  void AfterAllChildrenTraversed(ProfileNode* node) {
    delete node;
  }

  void AfterChildTraversed(ProfileNode*, ProfileNode*) { }
};
230 
231 
// The tree starts with a synthetic "(root)" function entry; node ids are
// handed out starting from 1.
ProfileTree::ProfileTree()
    : root_entry_(Logger::FUNCTION_TAG, "(root)"),
      next_node_id_(1),
      root_(new ProfileNode(this, &root_entry_)) {
}
237 
238 
// Deletes every node via a post-order traversal (children before parents).
ProfileTree::~ProfileTree() {
  DeleteNodesCallback cb;
  TraverseDepthFirst(&cb);
}
243 
244 
AddPathFromEnd(const Vector<CodeEntry * > & path)245 ProfileNode* ProfileTree::AddPathFromEnd(const Vector<CodeEntry*>& path) {
246   ProfileNode* node = root_;
247   for (CodeEntry** entry = path.start() + path.length() - 1;
248        entry != path.start() - 1;
249        --entry) {
250     if (*entry != NULL) {
251       node = node->FindOrAddChild(*entry);
252     }
253   }
254   node->IncrementSelfTicks();
255   return node;
256 }
257 
258 
AddPathFromStart(const Vector<CodeEntry * > & path)259 void ProfileTree::AddPathFromStart(const Vector<CodeEntry*>& path) {
260   ProfileNode* node = root_;
261   for (CodeEntry** entry = path.start();
262        entry != path.start() + path.length();
263        ++entry) {
264     if (*entry != NULL) {
265       node = node->FindOrAddChild(*entry);
266     }
267   }
268   node->IncrementSelfTicks();
269 }
270 
271 
272 struct NodesPair {
NodesPairv8::internal::NodesPair273   NodesPair(ProfileNode* src, ProfileNode* dst)
274       : src(src), dst(dst) { }
275   ProfileNode* src;
276   ProfileNode* dst;
277 };
278 
279 
// Cursor over a node's children; TraverseDepthFirst uses it to emulate a
// recursion frame (the node plus the index of the next child to visit).
class Position {
 public:
  explicit Position(ProfileNode* node)
      : node(node), child_idx_(0) { }
  // Child currently under the cursor; only valid if has_current_child().
  INLINE(ProfileNode* current_child()) {
    return node->children()->at(child_idx_);
  }
  INLINE(bool has_current_child()) {
    return child_idx_ < node->children()->length();
  }
  // Advance the cursor past the child that was just traversed.
  INLINE(void next_child()) { ++child_idx_; }

  ProfileNode* node;
 private:
  int child_idx_;
};
296 
297 
// Non-recursive implementation of a depth-first post-order tree traversal.
// The explicit stack of Position cursors replaces recursion frames; each
// callback hook fires exactly as it would in the recursive formulation.
template <typename Callback>
void ProfileTree::TraverseDepthFirst(Callback* callback) {
  List<Position> stack(10);
  stack.Add(Position(root_));
  while (stack.length() > 0) {
    Position& current = stack.last();
    if (current.has_current_child()) {
      // Descend into the next unvisited child. Note: Add() may reallocate
      // the list and invalidate |current|, but its argument is evaluated
      // first and |current| is not used again this iteration.
      callback->BeforeTraversingChild(current.node, current.current_child());
      stack.Add(Position(current.current_child()));
    } else {
      // All children done: fire the post-order hook, then tell the parent
      // (if any) to advance past this child.
      callback->AfterAllChildrenTraversed(current.node);
      if (stack.length() > 1) {
        Position& parent = stack[stack.length() - 2];
        callback->AfterChildTraversed(parent.node, current.node);
        parent.next_child();
      }
      // Remove child from the stack.
      stack.RemoveLast();
    }
  }
}
320 
321 
// A profile stamps its own start time at creation; individual samples and
// timestamps are retained only when |record_samples| is set.
CpuProfile::CpuProfile(const char* title, bool record_samples)
    : title_(title),
      record_samples_(record_samples),
      start_time_(base::TimeTicks::HighResolutionNow()) {
}
327 
328 
// Adds one sampled stack to the top-down tree. With sample recording on,
// the leaf node and its timestamp are also remembered in parallel lists.
void CpuProfile::AddPath(base::TimeTicks timestamp,
                         const Vector<CodeEntry*>& path) {
  ProfileNode* top_frame_node = top_down_.AddPathFromEnd(path);
  if (record_samples_) {
    timestamps_.Add(timestamp);
    samples_.Add(top_frame_node);
  }
}
337 
338 
// Finalizes the profile. Despite the legacy name, this currently only
// records the end time.
void CpuProfile::CalculateTotalTicksAndSamplingRate() {
  end_time_ = base::TimeTicks::HighResolutionNow();
}
342 
343 
// Debug dump of the whole top-down call tree to stdout.
void CpuProfile::Print() {
  base::OS::Print("[Top down]:\n");
  top_down_.Print();
}
348 
349 
// Sentinel entry value marking tree nodes that store shared-function ids
// (see GetSharedId), and the "no key" value for the code tree.
CodeEntry* const CodeMap::kSharedFunctionCodeEntry = NULL;
const CodeMap::CodeTreeConfig::Key CodeMap::CodeTreeConfig::kNoKey = NULL;
352 
353 
// Registers a code object covering [addr, addr + size). Any previously
// known code overlapping that range is evicted first so ranges in the
// tree stay disjoint.
void CodeMap::AddCode(Address addr, CodeEntry* entry, unsigned size) {
  DeleteAllCoveredCode(addr, addr + size);
  CodeTree::Locator locator;
  tree_.Insert(addr, &locator);
  locator.set_value(CodeEntryInfo(entry, size));
}
360 
361 
// Removes every registered code range overlapping [start, end). Walks
// backwards from end - 1 via predecessor queries, first collecting the
// overlapping start addresses, then removing them — avoiding mutation of
// the tree while it is being traversed.
void CodeMap::DeleteAllCoveredCode(Address start, Address end) {
  List<Address> to_delete;
  Address addr = end - 1;
  while (addr >= start) {
    CodeTree::Locator locator;
    if (!tree_.FindGreatestLessThan(addr, &locator)) break;
    Address start2 = locator.key(), end2 = start2 + locator.value().size;
    // Standard half-open interval overlap test against [start, end).
    if (start2 < end && start < end2) to_delete.Add(start2);
    addr = start2 - 1;
  }
  for (int i = 0; i < to_delete.length(); ++i) tree_.Remove(to_delete[i]);
}
374 
375 
FindEntry(Address addr,Address * start)376 CodeEntry* CodeMap::FindEntry(Address addr, Address* start) {
377   CodeTree::Locator locator;
378   if (tree_.FindGreatestLessThan(addr, &locator)) {
379     // locator.key() <= addr. Need to check that addr is within entry.
380     const CodeEntryInfo& entry = locator.value();
381     if (addr < (locator.key() + entry.size)) {
382       if (start) {
383         *start = locator.key();
384       }
385       return entry.entry;
386     }
387   }
388   return NULL;
389 }
390 
391 
// Returns a stable integer id for the SharedFunctionInfo at |addr|,
// allocating a fresh id on the first request for that address.
int CodeMap::GetSharedId(Address addr) {
  CodeTree::Locator locator;
  // For shared function entries, 'size' field is used to store their IDs.
  if (tree_.Find(addr, &locator)) {
    const CodeEntryInfo& entry = locator.value();
    DCHECK(entry.entry == kSharedFunctionCodeEntry);
    return entry.size;
  } else {
    // First sighting: store the new id under the sentinel entry.
    tree_.Insert(addr, &locator);
    int id = next_shared_id_++;
    locator.set_value(CodeEntryInfo(kSharedFunctionCodeEntry, id));
    return id;
  }
}
406 
407 
// Re-registers the code object known at |from| under the address |to|
// (e.g. after the GC moved the code). No-op if |from| is unknown.
void CodeMap::MoveCode(Address from, Address to) {
  if (from == to) return;
  CodeTree::Locator locator;
  if (!tree_.Find(from, &locator)) return;
  CodeEntryInfo entry = locator.value();
  tree_.Remove(from);
  // AddCode also evicts anything already covering the target range.
  AddCode(to, entry.entry, entry.size);
}
416 
417 
// Per-node visitor for CodeMap::Print: emits either a shared-function id
// record or "address size name" for a regular code entry.
void CodeMap::CodeTreePrinter::Call(
    const Address& key, const CodeMap::CodeEntryInfo& value) {
  // For shared function entries, 'size' field is used to store their IDs.
  if (value.entry == kSharedFunctionCodeEntry) {
    base::OS::Print("%p SharedFunctionInfo %d\n", key, value.size);
  } else {
    base::OS::Print("%p %5d %s\n", key, value.size, value.entry->name());
  }
}
427 
428 
// Debug dump of every registered code range, in address order.
void CodeMap::Print() {
  CodeTreePrinter printer;
  tree_.ForEach(&printer);
}
433 
434 
// The semaphore, initialized to 1, acts as a mutex guarding
// current_profiles_ against concurrent access (see
// AddPathToCurrentProfiles, which runs on the sampler side).
CpuProfilesCollection::CpuProfilesCollection(Heap* heap)
    : function_and_resource_names_(heap),
      current_profiles_semaphore_(1) {
}
439 
440 
// List::Iterate helper: frees one heap-allocated CodeEntry.
static void DeleteCodeEntry(CodeEntry** entry_ptr) {
  delete *entry_ptr;
}
444 
445 
// List::Iterate helper: frees one heap-allocated CpuProfile.
static void DeleteCpuProfile(CpuProfile** profile_ptr) {
  delete *profile_ptr;
}
449 
450 
// Frees all owned profiles — finished and still-running — and every
// CodeEntry handed out by NewCodeEntry.
CpuProfilesCollection::~CpuProfilesCollection() {
  finished_profiles_.Iterate(DeleteCpuProfile);
  current_profiles_.Iterate(DeleteCpuProfile);
  code_entries_.Iterate(DeleteCodeEntry);
}
456 
457 
// Starts a new profile named |title|. Returns false only when the limit
// on simultaneous profiles is reached. Starting a title that is already
// running is not an error: no duplicate is created, but true is returned
// so the caller still collects samples for it.
bool CpuProfilesCollection::StartProfiling(const char* title,
                                           bool record_samples) {
  current_profiles_semaphore_.Wait();
  if (current_profiles_.length() >= kMaxSimultaneousProfiles) {
    current_profiles_semaphore_.Signal();
    return false;
  }
  for (int i = 0; i < current_profiles_.length(); ++i) {
    if (strcmp(current_profiles_[i]->title(), title) == 0) {
      // Ignore attempts to start profile with the same title...
      current_profiles_semaphore_.Signal();
      // ... though return true to force it collect a sample.
      return true;
    }
  }
  current_profiles_.Add(new CpuProfile(title, record_samples));
  current_profiles_semaphore_.Signal();
  return true;
}
477 
478 
// Stops the most recently started profile whose title matches |title|
// (an empty title matches any) and moves it to the finished list.
// Returns NULL when no running profile matches.
CpuProfile* CpuProfilesCollection::StopProfiling(const char* title) {
  const int title_len = StrLength(title);
  CpuProfile* profile = NULL;
  current_profiles_semaphore_.Wait();
  // Scan newest-first so nested profiles stop in LIFO order.
  for (int i = current_profiles_.length() - 1; i >= 0; --i) {
    if (title_len == 0 || strcmp(current_profiles_[i]->title(), title) == 0) {
      profile = current_profiles_.Remove(i);
      break;
    }
  }
  current_profiles_semaphore_.Signal();

  if (profile == NULL) return NULL;
  // Finalize outside the lock: the sampler can no longer reach this
  // profile once it is off the current list.
  profile->CalculateTotalTicksAndSamplingRate();
  finished_profiles_.Add(profile);
  return profile;
}
496 
497 
IsLastProfile(const char * title)498 bool CpuProfilesCollection::IsLastProfile(const char* title) {
499   // Called from VM thread, and only it can mutate the list,
500   // so no locking is needed here.
501   if (current_profiles_.length() != 1) return false;
502   return StrLength(title) == 0
503       || strcmp(current_profiles_[0]->title(), title) == 0;
504 }
505 
506 
RemoveProfile(CpuProfile * profile)507 void CpuProfilesCollection::RemoveProfile(CpuProfile* profile) {
508   // Called from VM thread for a completed profile.
509   for (int i = 0; i < finished_profiles_.length(); i++) {
510     if (profile == finished_profiles_[i]) {
511       finished_profiles_.Remove(i);
512       return;
513     }
514   }
515   UNREACHABLE();
516 }
517 
518 
// Called for every tick: appends the symbolized stack |path| to each
// profile currently being recorded.
void CpuProfilesCollection::AddPathToCurrentProfiles(
    base::TimeTicks timestamp, const Vector<CodeEntry*>& path) {
  // As starting / stopping profiles is rare relatively to this
  // method, we don't bother minimizing the duration of lock holding,
  // e.g. copying contents of the list to a local vector.
  current_profiles_semaphore_.Wait();
  for (int i = 0; i < current_profiles_.length(); ++i) {
    current_profiles_[i]->AddPath(timestamp, path);
  }
  current_profiles_semaphore_.Signal();
}
530 
531 
NewCodeEntry(Logger::LogEventsAndTags tag,const char * name,const char * name_prefix,const char * resource_name,int line_number,int column_number)532 CodeEntry* CpuProfilesCollection::NewCodeEntry(
533       Logger::LogEventsAndTags tag,
534       const char* name,
535       const char* name_prefix,
536       const char* resource_name,
537       int line_number,
538       int column_number) {
539   CodeEntry* code_entry = new CodeEntry(tag,
540                                         name,
541                                         name_prefix,
542                                         resource_name,
543                                         line_number,
544                                         column_number);
545   code_entries_.Add(code_entry);
546   return code_entry;
547 }
548 
549 
// Names for the synthetic entries used when a frame does not correspond
// to a concrete JS function.
const char* const ProfileGenerator::kProgramEntryName =
    "(program)";
const char* const ProfileGenerator::kIdleEntryName =
    "(idle)";
const char* const ProfileGenerator::kGarbageCollectorEntryName =
    "(garbage collector)";
const char* const ProfileGenerator::kUnresolvedFunctionName =
    "(unresolved function)";
558 
559 
// Pre-creates the synthetic code entries shared by all generated
// profiles; they are owned by |profiles| (see NewCodeEntry).
ProfileGenerator::ProfileGenerator(CpuProfilesCollection* profiles)
    : profiles_(profiles),
      program_entry_(
          profiles->NewCodeEntry(Logger::FUNCTION_TAG, kProgramEntryName)),
      idle_entry_(
          profiles->NewCodeEntry(Logger::FUNCTION_TAG, kIdleEntryName)),
      gc_entry_(
          profiles->NewCodeEntry(Logger::BUILTIN_TAG,
                                 kGarbageCollectorEntryName)),
      unresolved_entry_(
          profiles->NewCodeEntry(Logger::FUNCTION_TAG,
                                 kUnresolvedFunctionName)) {
}
573 
574 
// Symbolizes one tick: translates the raw addresses in |sample| into
// CodeEntry pointers and feeds the resulting path into every active
// profile. Frames that cannot be resolved remain NULL and are skipped
// when the profile tree is built.
void ProfileGenerator::RecordTickSample(const TickSample& sample) {
  // Allocate space for stack frames + pc + function + vm-state.
  ScopedVector<CodeEntry*> entries(sample.frames_count + 3);
  // As actual number of decoded code entries may vary, initialize
  // entries vector with NULL values.
  CodeEntry** entry = entries.start();
  memset(entry, 0, entries.length() * sizeof(*entry));
  if (sample.pc != NULL) {
    if (sample.has_external_callback && sample.state == EXTERNAL &&
        sample.top_frame_type == StackFrame::EXIT) {
      // Don't use PC when in external callback code, as it can point
      // inside callback's code, and we will erroneously report
      // that a callback calls itself.
      *entry++ = code_map_.FindEntry(sample.external_callback);
    } else {
      Address start;
      CodeEntry* pc_entry = code_map_.FindEntry(sample.pc, &start);
      // If pc is in the function code before it set up stack frame or after the
      // frame was destroyed SafeStackFrameIterator incorrectly thinks that
      // ebp contains return address of the current function and skips caller's
      // frame. Check for this case and just skip such samples.
      if (pc_entry) {
        List<OffsetRange>* ranges = pc_entry->no_frame_ranges();
        if (ranges) {
          Code* code = Code::cast(HeapObject::FromAddress(start));
          int pc_offset = static_cast<int>(
              sample.pc - code->instruction_start());
          // Drop the whole sample if pc falls in a no-frame range.
          for (int i = 0; i < ranges->length(); i++) {
            OffsetRange& range = ranges->at(i);
            if (range.from <= pc_offset && pc_offset < range.to) {
              return;
            }
          }
        }
        *entry++ = pc_entry;

        if (pc_entry->builtin_id() == Builtins::kFunctionCall ||
            pc_entry->builtin_id() == Builtins::kFunctionApply) {
          // When current function is FunctionCall or FunctionApply builtin the
          // top frame is either frame of the calling JS function or internal
          // frame. In the latter case we know the caller for sure but in the
          // former case we don't so we simply replace the frame with
          // 'unresolved' entry.
          if (sample.top_frame_type == StackFrame::JAVA_SCRIPT) {
            *entry++ = unresolved_entry_;
          }
        }
      }
    }

    // Symbolize the remaining captured stack frames.
    for (const Address* stack_pos = sample.stack,
           *stack_end = stack_pos + sample.frames_count;
         stack_pos != stack_end;
         ++stack_pos) {
      *entry++ = code_map_.FindEntry(*stack_pos);
    }
  }

  if (FLAG_prof_browser_mode) {
    bool no_symbolized_entries = true;
    for (CodeEntry** e = entries.start(); e != entry; ++e) {
      if (*e != NULL) {
        no_symbolized_entries = false;
        break;
      }
    }
    // If no frames were symbolized, put the VM state entry in.
    if (no_symbolized_entries) {
      *entry++ = EntryForVMState(sample.state);
    }
  }

  profiles_->AddPathToCurrentProfiles(sample.timestamp, entries);
}
649 
650 
// Maps a VM state tag to the corresponding synthetic code entry used when
// no real frame could be symbolized; returns NULL for unhandled states.
CodeEntry* ProfileGenerator::EntryForVMState(StateTag tag) {
  switch (tag) {
    case GC:
      return gc_entry_;
    case JS:
    case COMPILER:
    // DOM events handlers are reported as OTHER / EXTERNAL entries.
    // To avoid confusing people, let's put all these entries into
    // one bucket.
    case OTHER:
    case EXTERNAL:
      return program_entry_;
    case IDLE:
      return idle_entry_;
    default: return NULL;
  }
}
668 
669 } }  // namespace v8::internal
670