// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/debug/debug-coverage.h"

#include "src/ast/ast.h"
#include "src/base/hashmap.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/frames-inl.h"
#include "src/isolate.h"
#include "src/objects.h"
#include "src/objects/debug-objects-inl.h"

namespace v8 {
namespace internal {

class SharedToCounterMap
    : public base::TemplateHashMapImpl<SharedFunctionInfo*, uint32_t,
                                       base::KeyEqualityMatcher<void*>,
                                       base::DefaultAllocationPolicy> {
 public:
  typedef base::TemplateHashMapEntry<SharedFunctionInfo*, uint32_t> Entry;
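  // Adds |count| to the existing count for |key|, saturating at UINT32_MAX
  // instead of wrapping on overflow.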
  inline void Add(SharedFunctionInfo* key, uint32_t count) {
    Entry* entry = LookupOrInsert(key, Hash(key), []() { return 0; });
    uint32_t old_count = entry->value;
    if (UINT32_MAX - count < old_count) {
      entry->value = UINT32_MAX;
    } else {
      entry->value = old_count + count;
    }
  }

  inline uint32_t Get(SharedFunctionInfo* key) {
    Entry* entry = Lookup(key, Hash(key));
    if (entry == nullptr) return 0;
    return entry->value;
  }

 private:
  static uint32_t Hash(SharedFunctionInfo* key) {
    return static_cast<uint32_t>(reinterpret_cast<intptr_t>(key));
  }

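  // Keys are raw SharedFunctionInfo pointers hashed by address. This is only
  // safe as long as the GC cannot move objects, which the no-allocation scope
  // below guarantees for the lifetime of the map.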
  DisallowHeapAllocation no_gc;
};

namespace {
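// Returns the start position of |info|, preferring the position of the
// function token when it is available.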
int StartPosition(SharedFunctionInfo* info) {
  int start = info->function_token_position();
  if (start == kNoSourcePosition) start = info->StartPosition();
  return start;
}

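// Orders SharedFunctionInfos by ascending start position; for equal start
// positions, the enclosing function (larger end position) comes first.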
bool CompareSharedFunctionInfo(SharedFunctionInfo* a, SharedFunctionInfo* b) {
  int a_start = StartPosition(a);
  int b_start = StartPosition(b);
  if (a_start == b_start) return a->EndPosition() > b->EndPosition();
  return a_start < b_start;
}

bool CompareCoverageBlock(const CoverageBlock& a, const CoverageBlock& b) {
  DCHECK_NE(kNoSourcePosition, a.start);
  DCHECK_NE(kNoSourcePosition, b.start);
  if (a.start == b.start) return a.end > b.end;
  return a.start < b.start;
}

void SortBlockData(std::vector<CoverageBlock>& v) {
  // Sort according to the block nesting structure.
  std::sort(v.begin(), v.end(), CompareCoverageBlock);
}

std::vector<CoverageBlock> GetSortedBlockData(SharedFunctionInfo* shared) {
  DCHECK(shared->HasCoverageInfo());

  CoverageInfo* coverage_info =
      CoverageInfo::cast(shared->GetDebugInfo()->coverage_info());

  std::vector<CoverageBlock> result;
  if (coverage_info->SlotCount() == 0) return result;

  for (int i = 0; i < coverage_info->SlotCount(); i++) {
    const int start_pos = coverage_info->StartSourcePosition(i);
    const int until_pos = coverage_info->EndSourcePosition(i);
    const int count = coverage_info->BlockCount(i);

    DCHECK_NE(kNoSourcePosition, start_pos);
    result.emplace_back(start_pos, until_pos, count);
  }

  SortBlockData(result);

  return result;
}

// A utility class to simplify logic for performing passes over block coverage
// ranges. Provides access to the implicit tree structure of ranges (i.e. access
// to parent and sibling blocks), and supports efficient in-place editing and
// deletion. The underlying backing store is the array of CoverageBlocks stored
// on the CoverageFunction.
class CoverageBlockIterator final {
 public:
  explicit CoverageBlockIterator(CoverageFunction* function)
      : function_(function),
        ended_(false),
        delete_current_(false),
        read_index_(-1),
        write_index_(-1) {
    DCHECK(std::is_sorted(function_->blocks.begin(), function_->blocks.end(),
                          CompareCoverageBlock));
  }

  ~CoverageBlockIterator() {
    Finalize();
    DCHECK(std::is_sorted(function_->blocks.begin(), function_->blocks.end(),
                          CompareCoverageBlock));
  }

  bool HasNext() const {
    return read_index_ + 1 < static_cast<int>(function_->blocks.size());
  }

  bool Next() {
    if (!HasNext()) {
      if (!ended_) MaybeWriteCurrent();
      ended_ = true;
      return false;
    }

    // If a block has been deleted, subsequent iteration moves trailing blocks
    // to their updated position within the array.
    MaybeWriteCurrent();

    if (read_index_ == -1) {
      // Initialize the nesting stack with the function range.
      nesting_stack_.emplace_back(function_->start, function_->end,
                                  function_->count);
    } else if (!delete_current_) {
      nesting_stack_.emplace_back(GetBlock());
    }

    delete_current_ = false;
    read_index_++;

    DCHECK(IsActive());

    CoverageBlock& block = GetBlock();
    while (nesting_stack_.size() > 1 &&
           nesting_stack_.back().end <= block.start) {
      nesting_stack_.pop_back();
    }

    DCHECK_IMPLIES(block.start >= function_->end,
                   block.end == kNoSourcePosition);
    DCHECK_NE(block.start, kNoSourcePosition);
    DCHECK_LE(block.end, GetParent().end);

    return true;
  }

  CoverageBlock& GetBlock() {
    DCHECK(IsActive());
    return function_->blocks[read_index_];
  }

  CoverageBlock& GetNextBlock() {
    DCHECK(IsActive());
    DCHECK(HasNext());
    return function_->blocks[read_index_ + 1];
  }

  CoverageBlock& GetPreviousBlock() {
    DCHECK(IsActive());
    DCHECK_GT(read_index_, 0);
    return function_->blocks[read_index_ - 1];
  }

  CoverageBlock& GetParent() {
    DCHECK(IsActive());
    return nesting_stack_.back();
  }

  bool HasSiblingOrChild() {
    DCHECK(IsActive());
    return HasNext() && GetNextBlock().start < GetParent().end;
  }

  CoverageBlock& GetSiblingOrChild() {
    DCHECK(HasSiblingOrChild());
    DCHECK(IsActive());
    return GetNextBlock();
  }

  // A range is considered to be at top level if its parent range is the
  // function range.
  bool IsTopLevel() const { return nesting_stack_.size() == 1; }

  void DeleteBlock() {
    DCHECK(!delete_current_);
    DCHECK(IsActive());
    delete_current_ = true;
  }

 private:
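  // Writes the current block to the next write slot unless it has been marked
  // for deletion, compacting the blocks vector in place as iteration advances.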
  void MaybeWriteCurrent() {
    if (delete_current_) return;
    if (read_index_ >= 0 && write_index_ != read_index_) {
      function_->blocks[write_index_] = function_->blocks[read_index_];
    }
    write_index_++;
  }

  void Finalize() {
    while (Next()) {
      // Just iterate to the end.
    }
    function_->blocks.resize(write_index_);
  }

  bool IsActive() const { return read_index_ >= 0 && !ended_; }

  CoverageFunction* function_;
  std::vector<CoverageBlock> nesting_stack_;
  bool ended_;
  bool delete_current_;
  int read_index_;
  int write_index_;
};

bool HaveSameSourceRange(const CoverageBlock& lhs, const CoverageBlock& rhs) {
  return lhs.start == rhs.start && lhs.end == rhs.end;
}

void MergeDuplicateSingletons(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next() && iter.HasNext()) {
    CoverageBlock& block = iter.GetBlock();
    CoverageBlock& next_block = iter.GetNextBlock();

    // Identical ranges should only occur through singleton ranges. Consider the
    // ranges for `for (.) break;`: continuation ranges for both the `break` and
    // `for` statements begin after the trailing semicolon.
    // Such ranges are merged and keep the maximal execution count.
    if (!HaveSameSourceRange(block, next_block)) continue;

    DCHECK_EQ(kNoSourcePosition, block.end);  // Singleton range.
    next_block.count = std::max(block.count, next_block.count);
    iter.DeleteBlock();
  }
}

void MergeDuplicateRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next() && iter.HasNext()) {
    CoverageBlock& block = iter.GetBlock();
    CoverageBlock& next_block = iter.GetNextBlock();

    if (!HaveSameSourceRange(block, next_block)) continue;

    DCHECK_NE(kNoSourcePosition, block.end);  // Non-singleton range.
    next_block.count = std::max(block.count, next_block.count);
    iter.DeleteBlock();
  }
}

// Rewrite position singletons (produced by unconditional control flow
// like return statements, and by continuation counters) into source
// ranges that end at the next sibling range or the end of the parent
// range, whichever comes first.
void RewritePositionSingletonsToRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    CoverageBlock& parent = iter.GetParent();

    if (block.start >= function->end) {
      DCHECK_EQ(block.end, kNoSourcePosition);
      iter.DeleteBlock();
    } else if (block.end == kNoSourcePosition) {
      // The current block ends at the next sibling block (if it exists) or the
      // end of the parent block otherwise.
      if (iter.HasSiblingOrChild()) {
        block.end = iter.GetSiblingOrChild().start;
      } else if (iter.IsTopLevel()) {
        // See https://crbug.com/v8/6661. Functions are special-cased because
        // we never want the closing brace to be uncovered. This is mainly to
        // avoid a noisy UI.
        block.end = parent.end - 1;
      } else {
        block.end = parent.end;
      }
    }
  }
}

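// Merges a block into its directly following sibling when the two ranges are
// adjacent and have the same count, e.g. [0,10) with count 2 followed by
// [10,25) with count 2 becomes a single block [0,25) with count 2.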
void MergeConsecutiveRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();

    if (iter.HasSiblingOrChild()) {
      CoverageBlock& sibling = iter.GetSiblingOrChild();
      if (sibling.start == block.end && sibling.count == block.count) {
        // Best-effort: this pass may miss mergeable siblings in the presence of
        // child blocks.
        sibling.start = block.start;
        iter.DeleteBlock();
      }
    }
  }
}

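// Removes nested blocks whose count equals that of their parent block; the
// parent range then already describes the nested range.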
void MergeNestedRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    CoverageBlock& parent = iter.GetParent();

    if (parent.count == block.count) {
      // Transformation may not be valid if sibling blocks exist with a
      // differing count.
      iter.DeleteBlock();
    }
  }
}

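// Removes singleton ranges (ranges without an end position) that share their
// start position with the immediately preceding full range. See the call site
// in CollectBlockCoverage below for motivation.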
void FilterAliasedSingletons(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  iter.Next();  // Advance once since we reference the previous block later.

  while (iter.Next()) {
    CoverageBlock& previous_block = iter.GetPreviousBlock();
    CoverageBlock& block = iter.GetBlock();

    bool is_singleton = block.end == kNoSourcePosition;
    bool aliases_start = block.start == previous_block.start;

    if (is_singleton && aliases_start) {
      // The previous block must have a full range since duplicate singletons
      // have already been merged.
      DCHECK_NE(previous_block.end, kNoSourcePosition);
      // Likewise, the next block must have another start position since
      // singletons are sorted to the end.
      DCHECK_IMPLIES(iter.HasNext(), iter.GetNextBlock().start != block.start);
      iter.DeleteBlock();
    }
  }
}

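// Drops uncovered ranges nested directly inside an uncovered parent range;
// the parent already reports the zero count.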
void FilterUncoveredRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    CoverageBlock& parent = iter.GetParent();
    if (block.count == 0 && parent.count == 0) iter.DeleteBlock();
  }
}

void FilterEmptyRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    if (block.start == block.end) iter.DeleteBlock();
  }
}

void ClampToBinary(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    if (block.count > 0) block.count = 1;
  }
}

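// Zeroes all block counters stored on the function's CoverageInfo so that the
// next collection starts from a clean slate.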
void ResetAllBlockCounts(SharedFunctionInfo* shared) {
  DCHECK(shared->HasCoverageInfo());

  CoverageInfo* coverage_info =
      CoverageInfo::cast(shared->GetDebugInfo()->coverage_info());

  for (int i = 0; i < coverage_info->SlotCount(); i++) {
    coverage_info->ResetBlockCount(i);
  }
}

bool IsBlockMode(debug::Coverage::Mode mode) {
  switch (mode) {
    case debug::Coverage::kBlockBinary:
    case debug::Coverage::kBlockCount:
      return true;
    default:
      return false;
  }
}

bool IsBinaryMode(debug::Coverage::Mode mode) {
  switch (mode) {
    case debug::Coverage::kBlockBinary:
    case debug::Coverage::kPreciseBinary:
      return true;
    default:
      return false;
  }
}

void CollectBlockCoverage(CoverageFunction* function, SharedFunctionInfo* info,
                          debug::Coverage::Mode mode) {
  DCHECK(IsBlockMode(mode));

  function->has_block_coverage = true;
  function->blocks = GetSortedBlockData(info);

  // If in binary mode, only report counts of 0/1.
  if (mode == debug::Coverage::kBlockBinary) ClampToBinary(function);

  // Remove duplicate singleton ranges, keeping the max count.
  MergeDuplicateSingletons(function);

  // Remove singleton ranges with the same start position as a full range and
  // throw away their counts.
  // Singleton ranges are only intended to split existing full ranges and should
  // never expand into a full range. Consider 'if (cond) { ... } else { ... }'
  // as a problematic example; if the then-block produces a continuation
  // singleton, it would incorrectly expand into the else range.
  // For more context, see https://crbug.com/v8/8237.
  FilterAliasedSingletons(function);

  // Rewrite all singletons (created e.g. by continuations and unconditional
  // control flow) to ranges.
  RewritePositionSingletonsToRanges(function);

  // Merge nested and consecutive ranges with identical counts.
  // Note that it's necessary to merge duplicate ranges prior to merging nested
  // ranges in order to avoid invalid transformations. See crbug.com/827530.
  MergeConsecutiveRanges(function);

  SortBlockData(function->blocks);
  MergeDuplicateRanges(function);
  MergeNestedRanges(function);

  MergeConsecutiveRanges(function);

  // Filter out ranges with count == 0 unless the immediate parent range has
  // a count != 0.
  FilterUncoveredRanges(function);

  // Filter out ranges of zero length.
  FilterEmptyRanges(function);

  // Reset all counters on the DebugInfo to zero.
  ResetAllBlockCounts(info);
}
}  // anonymous namespace

std::unique_ptr<Coverage> Coverage::CollectPrecise(Isolate* isolate) {
  DCHECK(!isolate->is_best_effort_code_coverage());
  std::unique_ptr<Coverage> result =
      Collect(isolate, isolate->code_coverage_mode());
  if (!isolate->is_collecting_type_profile() &&
      (isolate->is_precise_binary_code_coverage() ||
       isolate->is_block_binary_code_coverage())) {
    // We do not have to hold onto feedback vectors for invocations we already
    // reported. So we can reset the list.
    isolate->SetFeedbackVectorsForProfilingTools(*ArrayList::New(isolate, 0));
  }
  return result;
}

std::unique_ptr<Coverage> Coverage::CollectBestEffort(Isolate* isolate) {
  return Collect(isolate, v8::debug::Coverage::kBestEffort);
}

std::unique_ptr<Coverage> Coverage::Collect(
    Isolate* isolate, v8::debug::Coverage::Mode collectionMode) {
  SharedToCounterMap counter_map;

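  // Invocation counts are only reset when the caller requested a mode other
  // than best-effort coverage.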
  const bool reset_count = collectionMode != v8::debug::Coverage::kBestEffort;

  switch (isolate->code_coverage_mode()) {
    case v8::debug::Coverage::kBlockBinary:
    case v8::debug::Coverage::kBlockCount:
    case v8::debug::Coverage::kPreciseBinary:
    case v8::debug::Coverage::kPreciseCount: {
      // Feedback vectors are already listed to prevent losing them to GC.
      DCHECK(isolate->factory()
                 ->feedback_vectors_for_profiling_tools()
                 ->IsArrayList());
      Handle<ArrayList> list = Handle<ArrayList>::cast(
          isolate->factory()->feedback_vectors_for_profiling_tools());
      for (int i = 0; i < list->Length(); i++) {
        FeedbackVector* vector = FeedbackVector::cast(list->Get(i));
        SharedFunctionInfo* shared = vector->shared_function_info();
        DCHECK(shared->IsSubjectToDebugging());
        uint32_t count = static_cast<uint32_t>(vector->invocation_count());
        if (reset_count) vector->clear_invocation_count();
        counter_map.Add(shared, count);
      }
      break;
    }
    case v8::debug::Coverage::kBestEffort: {
      DCHECK(!isolate->factory()
                  ->feedback_vectors_for_profiling_tools()
                  ->IsArrayList());
      DCHECK_EQ(v8::debug::Coverage::kBestEffort, collectionMode);
      HeapIterator heap_iterator(isolate->heap());
      while (HeapObject* current_obj = heap_iterator.next()) {
        if (!current_obj->IsFeedbackVector()) continue;
        FeedbackVector* vector = FeedbackVector::cast(current_obj);
        SharedFunctionInfo* shared = vector->shared_function_info();
        if (!shared->IsSubjectToDebugging()) continue;
        uint32_t count = static_cast<uint32_t>(vector->invocation_count());
        counter_map.Add(shared, count);
      }
      break;
    }
  }

  // Iterate shared function infos of every script and build a mapping
  // between source ranges and invocation counts.
  std::unique_ptr<Coverage> result(new Coverage());
  Script::Iterator scripts(isolate);
  while (Script* script = scripts.Next()) {
    if (!script->IsUserJavaScript()) continue;

    // Create and add new script data.
    Handle<Script> script_handle(script, isolate);
    result->emplace_back(script_handle);
    std::vector<CoverageFunction>* functions = &result->back().functions;

    std::vector<SharedFunctionInfo*> sorted;

    {
      // Sort functions by start position, from outer to inner functions.
      SharedFunctionInfo::ScriptIterator infos(isolate, *script_handle);
      while (SharedFunctionInfo* info = infos.Next()) {
        sorted.push_back(info);
      }
      std::sort(sorted.begin(), sorted.end(), CompareSharedFunctionInfo);
    }

    // Stack to track nested functions, referring to functions by index.
    std::vector<size_t> nesting;

    // Use the sorted list to reconstruct function nesting.
    for (SharedFunctionInfo* info : sorted) {
      int start = StartPosition(info);
      int end = info->EndPosition();
      uint32_t count = counter_map.Get(info);
      // Find the correct outer function based on start position.
      while (!nesting.empty() && functions->at(nesting.back()).end <= start) {
        nesting.pop_back();
      }
      if (count != 0) {
        switch (collectionMode) {
          case v8::debug::Coverage::kBlockCount:
          case v8::debug::Coverage::kPreciseCount:
            break;
          case v8::debug::Coverage::kBlockBinary:
          case v8::debug::Coverage::kPreciseBinary:
            count = info->has_reported_binary_coverage() ? 0 : 1;
            info->set_has_reported_binary_coverage(true);
            break;
          case v8::debug::Coverage::kBestEffort:
            count = 1;
            break;
        }
      }

      Handle<String> name(info->DebugName(), isolate);
      CoverageFunction function(start, end, count, name);

      if (IsBlockMode(collectionMode) && info->HasCoverageInfo()) {
        CollectBlockCoverage(&function, info, collectionMode);
      }

      // Only include a function range if it or its parent function is
      // covered, or if it contains non-trivial block coverage.
      bool is_covered = (count != 0);
      bool parent_is_covered =
          (!nesting.empty() && functions->at(nesting.back()).count != 0);
      bool has_block_coverage = !function.blocks.empty();
      if (is_covered || parent_is_covered || has_block_coverage) {
        nesting.push_back(functions->size());
        functions->emplace_back(function);
      }
    }

    // Remove entries for scripts that have no coverage.
    if (functions->empty()) result->pop_back();
  }
  return result;
}

void Coverage::SelectMode(Isolate* isolate, debug::Coverage::Mode mode) {
  switch (mode) {
    case debug::Coverage::kBestEffort:
      // Note that DevTools switches back to best-effort coverage once the
      // recording is stopped. Since we delete coverage infos at that point, any
      // following coverage recording (without reloads) will be at function
      // granularity.
      isolate->debug()->RemoveAllCoverageInfos();
      if (!isolate->is_collecting_type_profile()) {
        isolate->SetFeedbackVectorsForProfilingTools(
            ReadOnlyRoots(isolate).undefined_value());
      }
      break;
    case debug::Coverage::kBlockBinary:
    case debug::Coverage::kBlockCount:
    case debug::Coverage::kPreciseBinary:
    case debug::Coverage::kPreciseCount: {
      HandleScope scope(isolate);

      // Remove all optimized functions. Optimized and inlined functions do not
      // increment invocation count.
      Deoptimizer::DeoptimizeAll(isolate);

      // Root all feedback vectors to avoid early collection.
      isolate->MaybeInitializeVectorListFromHeap();

      HeapIterator heap_iterator(isolate->heap());
      while (HeapObject* o = heap_iterator.next()) {
        if (IsBinaryMode(mode) && o->IsSharedFunctionInfo()) {
          // If collecting binary coverage, reset
          // SFI::has_reported_binary_coverage to avoid optimizing / inlining
          // functions before they have reported coverage.
          SharedFunctionInfo* shared = SharedFunctionInfo::cast(o);
          shared->set_has_reported_binary_coverage(false);
        } else if (o->IsFeedbackVector()) {
          // In any case, clear any collected invocation counts.
          FeedbackVector* vector = FeedbackVector::cast(o);
          vector->clear_invocation_count();
        }
      }

      break;
    }
  }
  isolate->set_code_coverage_mode(mode);
}

}  // namespace internal
}  // namespace v8