// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/gc-tracer.h"

#include "src/counters.h"
#include "src/heap/heap-inl.h"
#include "src/isolate.h"

namespace v8 {
namespace internal {

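// Sums free-list waste and remaining available bytes across all old spaces;
// the DCHECK guards the addition against size_t overflow.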
static size_t CountTotalHolesSize(Heap* heap) {
  size_t holes_size = 0;
  OldSpaces spaces(heap);
  for (OldSpace* space = spaces.next(); space != nullptr;
       space = spaces.next()) {
    DCHECK_GE(holes_size + space->Waste() + space->Available(), holes_size);
    holes_size += space->Waste() + space->Available();
  }
  return holes_size;
}

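// Starts timing the given scope. When --runtime-stats is enabled, the scope
// also enters the GC runtime-call-stats timer.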
GCTracer::Scope::Scope(GCTracer* tracer, ScopeId scope)
    : tracer_(tracer), scope_(scope) {
  // All accesses to incremental_marking_scope assume that incremental marking
  // scopes come first.
  STATIC_ASSERT(FIRST_INCREMENTAL_SCOPE == 0);
  start_time_ = tracer_->heap_->MonotonicallyIncreasingTimeInMs();
  // TODO(cbruni): remove once we have fully moved to a trace-based system.
  if (V8_UNLIKELY(FLAG_runtime_stats)) {
    RuntimeCallStats::Enter(
        tracer_->heap_->isolate()->counters()->runtime_call_stats(), &timer_,
        &RuntimeCallStats::GC);
  }
}

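// Stops timing, attributing the elapsed time to the scope's id, and leaves
// the runtime-call-stats timer if it was entered.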
GCTracer::Scope::~Scope() {
  tracer_->AddScopeSample(
      scope_, tracer_->heap_->MonotonicallyIncreasingTimeInMs() - start_time_);
  // TODO(cbruni): remove once we have fully moved to a trace-based system.
  if (V8_UNLIKELY(FLAG_runtime_stats)) {
    RuntimeCallStats::Leave(
        tracer_->heap_->isolate()->counters()->runtime_call_stats(), &timer_);
  }
}

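// Maps a ScopeId to its trace name; e.g. Scope::MC_MARK yields
// "V8.GC_MC_MARK".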
const char* GCTracer::Scope::Name(ScopeId id) {
#define CASE(scope)  \
  case Scope::scope: \
    return "V8.GC_" #scope;
  switch (id) {
    TRACER_SCOPES(CASE)
    case Scope::NUMBER_OF_SCOPES:
      break;
  }
#undef CASE
  return "(unknown)";
}

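// Creates an event of the given type with all statistics zeroed; GCTracer
// fills them in over the course of the cycle.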
GCTracer::Event::Event(Type type, GarbageCollectionReason gc_reason,
                       const char* collector_reason)
    : type(type),
      gc_reason(gc_reason),
      collector_reason(collector_reason),
      start_time(0.0),
      end_time(0.0),
      reduce_memory(false),
      start_object_size(0),
      end_object_size(0),
      start_memory_size(0),
      end_memory_size(0),
      start_holes_size(0),
      end_holes_size(0),
      new_space_object_size(0),
      survived_new_space_object_size(0),
      incremental_marking_bytes(0),
      incremental_marking_duration(0.0) {
  for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
    scopes[i] = 0;
  }
}

const char* GCTracer::Event::TypeName(bool short_name) const {
  switch (type) {
    case SCAVENGER:
      return (short_name) ? "s" : "Scavenge";
    case MARK_COMPACTOR:
    case INCREMENTAL_MARK_COMPACTOR:
      return (short_name) ? "ms" : "Mark-sweep";
    case MINOR_MARK_COMPACTOR:
      return (short_name) ? "mmc" : "Minor Mark-Compact";
    case START:
      return (short_name) ? "st" : "Start";
  }
  return "Unknown Event Type";
}

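// The tracer starts with a synthetic START event whose end time is "now", so
// the first real GC has a valid previous_ event to compute mutator time
// against.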
GCTracer::GCTracer(Heap* heap)
    : heap_(heap),
      current_(Event::START, GarbageCollectionReason::kUnknown, nullptr),
      previous_(current_),
      incremental_marking_bytes_(0),
      incremental_marking_duration_(0.0),
      incremental_marking_start_time_(0.0),
      recorded_incremental_marking_speed_(0.0),
      allocation_time_ms_(0.0),
      new_space_allocation_counter_bytes_(0),
      old_generation_allocation_counter_bytes_(0),
      allocation_duration_since_gc_(0.0),
      new_space_allocation_in_bytes_since_gc_(0),
      old_generation_allocation_in_bytes_since_gc_(0),
      combined_mark_compact_speed_cache_(0.0),
      start_counter_(0) {
  current_.end_time = heap_->MonotonicallyIncreasingTimeInMs();
}

void GCTracer::ResetForTesting() {
  current_ = Event(Event::START, GarbageCollectionReason::kTesting, nullptr);
  current_.end_time = heap_->MonotonicallyIncreasingTimeInMs();
  previous_ = current_;
  ResetIncrementalMarkingCounters();
  allocation_time_ms_ = 0.0;
  new_space_allocation_counter_bytes_ = 0;
  old_generation_allocation_counter_bytes_ = 0;
  allocation_duration_since_gc_ = 0.0;
  new_space_allocation_in_bytes_since_gc_ = 0;
  old_generation_allocation_in_bytes_since_gc_ = 0;
  combined_mark_compact_speed_cache_ = 0.0;
  recorded_minor_gcs_total_.Reset();
  recorded_minor_gcs_survived_.Reset();
  recorded_compactions_.Reset();
  recorded_mark_compacts_.Reset();
  recorded_incremental_mark_compacts_.Reset();
  recorded_new_generation_allocations_.Reset();
  recorded_old_generation_allocations_.Reset();
  recorded_context_disposal_times_.Reset();
  recorded_survival_ratios_.Reset();
  start_counter_ = 0;
}

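// Begins tracing a GC cycle. Start/Stop may reenter when one collection
// triggers another; start_counter_ ensures only the outermost pair records an
// event.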
void GCTracer::Start(GarbageCollector collector,
                     GarbageCollectionReason gc_reason,
                     const char* collector_reason) {
  start_counter_++;
  if (start_counter_ != 1) return;

  previous_ = current_;
  double start_time = heap_->MonotonicallyIncreasingTimeInMs();
  SampleAllocation(start_time, heap_->NewSpaceAllocationCounter(),
                   heap_->OldGenerationAllocationCounter());

  switch (collector) {
    case SCAVENGER:
      current_ = Event(Event::SCAVENGER, gc_reason, collector_reason);
      break;
    case MINOR_MARK_COMPACTOR:
      current_ =
          Event(Event::MINOR_MARK_COMPACTOR, gc_reason, collector_reason);
      break;
    case MARK_COMPACTOR:
      if (heap_->incremental_marking()->WasActivated()) {
        current_ = Event(Event::INCREMENTAL_MARK_COMPACTOR, gc_reason,
                         collector_reason);
      } else {
        current_ = Event(Event::MARK_COMPACTOR, gc_reason, collector_reason);
      }
      break;
  }

  current_.reduce_memory = heap_->ShouldReduceMemory();
  current_.start_time = start_time;
  current_.start_object_size = heap_->SizeOfObjects();
  current_.start_memory_size = heap_->memory_allocator()->Size();
  current_.start_holes_size = CountTotalHolesSize(heap_);
  current_.new_space_object_size =
      heap_->new_space()->top() - heap_->new_space()->bottom();

  current_.incremental_marking_bytes = 0;
  current_.incremental_marking_duration = 0;

  for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
    current_.scopes[i] = 0;
  }

  size_t committed_memory = heap_->CommittedMemory() / KB;
  size_t used_memory = current_.start_object_size / KB;

  Counters* counters = heap_->isolate()->counters();

  if (Heap::IsYoungGenerationCollector(collector)) {
    counters->scavenge_reason()->AddSample(static_cast<int>(gc_reason));
  } else {
    counters->mark_compact_reason()->AddSample(static_cast<int>(gc_reason));
  }
  counters->aggregated_memory_heap_committed()->AddSample(start_time,
                                                          committed_memory);
  counters->aggregated_memory_heap_used()->AddSample(start_time, used_memory);
  // TODO(cbruni): remove once we have fully moved to a trace-based system.
  if (V8_UNLIKELY(FLAG_runtime_stats)) {
    RuntimeCallStats::Enter(heap_->isolate()->counters()->runtime_call_stats(),
                            &timer_, &RuntimeCallStats::GC);
  }
}

void GCTracer::ResetIncrementalMarkingCounters() {
  incremental_marking_bytes_ = 0;
  incremental_marking_duration_ = 0;
  for (int i = 0; i < Scope::NUMBER_OF_INCREMENTAL_SCOPES; i++) {
    incremental_marking_scopes_[i].ResetCurrentCycle();
  }
}

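// Finishes the current cycle: records end statistics, feeds the speed and
// throughput ring buffers, and emits --trace-gc / --trace-gc-nvp output.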
void GCTracer::Stop(GarbageCollector collector) {
  start_counter_--;
  if (start_counter_ != 0) {
    heap_->isolate()->PrintWithTimestamp("[Finished reentrant %s during %s.]\n",
                                         Heap::CollectorName(collector),
                                         current_.TypeName(false));
    return;
  }

  DCHECK_GE(start_counter_, 0);
  DCHECK((collector == SCAVENGER && current_.type == Event::SCAVENGER) ||
         (collector == MINOR_MARK_COMPACTOR &&
          current_.type == Event::MINOR_MARK_COMPACTOR) ||
         (collector == MARK_COMPACTOR &&
          (current_.type == Event::MARK_COMPACTOR ||
           current_.type == Event::INCREMENTAL_MARK_COMPACTOR)));

  current_.end_time = heap_->MonotonicallyIncreasingTimeInMs();
  current_.end_object_size = heap_->SizeOfObjects();
  current_.end_memory_size = heap_->memory_allocator()->Size();
  current_.end_holes_size = CountTotalHolesSize(heap_);
  current_.survived_new_space_object_size = heap_->SurvivedNewSpaceObjectSize();

  AddAllocation(current_.end_time);

  size_t committed_memory = heap_->CommittedMemory() / KB;
  size_t used_memory = current_.end_object_size / KB;
  heap_->isolate()->counters()->aggregated_memory_heap_committed()->AddSample(
      current_.end_time, committed_memory);
  heap_->isolate()->counters()->aggregated_memory_heap_used()->AddSample(
      current_.end_time, used_memory);

  double duration = current_.end_time - current_.start_time;

  switch (current_.type) {
    case Event::SCAVENGER:
    case Event::MINOR_MARK_COMPACTOR:
      recorded_minor_gcs_total_.Push(
          MakeBytesAndDuration(current_.new_space_object_size, duration));
      recorded_minor_gcs_survived_.Push(MakeBytesAndDuration(
          current_.survived_new_space_object_size, duration));
      break;
    case Event::INCREMENTAL_MARK_COMPACTOR:
      current_.incremental_marking_bytes = incremental_marking_bytes_;
      current_.incremental_marking_duration = incremental_marking_duration_;
      for (int i = 0; i < Scope::NUMBER_OF_INCREMENTAL_SCOPES; i++) {
        current_.incremental_marking_scopes[i] = incremental_marking_scopes_[i];
        current_.scopes[i] = incremental_marking_scopes_[i].duration;
      }
      RecordIncrementalMarkingSpeed(current_.incremental_marking_bytes,
                                    current_.incremental_marking_duration);
      recorded_incremental_mark_compacts_.Push(
          MakeBytesAndDuration(current_.start_object_size, duration));
      ResetIncrementalMarkingCounters();
      combined_mark_compact_speed_cache_ = 0.0;
      break;
    case Event::MARK_COMPACTOR:
      DCHECK_EQ(0u, current_.incremental_marking_bytes);
      DCHECK_EQ(0, current_.incremental_marking_duration);
      recorded_mark_compacts_.Push(
          MakeBytesAndDuration(current_.start_object_size, duration));
      ResetIncrementalMarkingCounters();
      combined_mark_compact_speed_cache_ = 0.0;
      break;
    case Event::START:
      UNREACHABLE();
  }

  heap_->UpdateTotalGCTime(duration);

  if ((current_.type == Event::SCAVENGER ||
       current_.type == Event::MINOR_MARK_COMPACTOR) &&
      FLAG_trace_gc_ignore_scavenger)
    return;

  if (FLAG_trace_gc_nvp) {
    PrintNVP();
  } else {
    Print();
  }

  if (FLAG_trace_gc) {
    heap_->PrintShortHeapStatistics();
  }

  // TODO(cbruni): remove once we have fully moved to a trace-based system.
  if (V8_UNLIKELY(FLAG_runtime_stats)) {
    RuntimeCallStats::Leave(heap_->isolate()->counters()->runtime_call_stats(),
                            &timer_);
  }
}

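// Takes a sample of the monotonically increasing allocation counters and
// accumulates the deltas since the previous sample into the *_since_gc_
// totals.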
void GCTracer::SampleAllocation(double current_ms,
                                size_t new_space_counter_bytes,
                                size_t old_generation_counter_bytes) {
  if (allocation_time_ms_ == 0) {
    // It is the first sample.
    allocation_time_ms_ = current_ms;
    new_space_allocation_counter_bytes_ = new_space_counter_bytes;
    old_generation_allocation_counter_bytes_ = old_generation_counter_bytes;
    return;
  }
  // This assumes that counters are unsigned integers so that the subtraction
  // below works even if the new counter is less than the old counter.
  size_t new_space_allocated_bytes =
      new_space_counter_bytes - new_space_allocation_counter_bytes_;
  size_t old_generation_allocated_bytes =
      old_generation_counter_bytes - old_generation_allocation_counter_bytes_;
  double duration = current_ms - allocation_time_ms_;
  allocation_time_ms_ = current_ms;
  new_space_allocation_counter_bytes_ = new_space_counter_bytes;
  old_generation_allocation_counter_bytes_ = old_generation_counter_bytes;
  allocation_duration_since_gc_ += duration;
  new_space_allocation_in_bytes_since_gc_ += new_space_allocated_bytes;
  old_generation_allocation_in_bytes_since_gc_ +=
      old_generation_allocated_bytes;
}

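// Flushes the allocation totals accumulated since the last GC into the
// throughput ring buffers and resets them; called from Stop().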
void GCTracer::AddAllocation(double current_ms) {
  allocation_time_ms_ = current_ms;
  if (allocation_duration_since_gc_ > 0) {
    recorded_new_generation_allocations_.Push(
        MakeBytesAndDuration(new_space_allocation_in_bytes_since_gc_,
                             allocation_duration_since_gc_));
    recorded_old_generation_allocations_.Push(
        MakeBytesAndDuration(old_generation_allocation_in_bytes_since_gc_,
                             allocation_duration_since_gc_));
  }
  allocation_duration_since_gc_ = 0;
  new_space_allocation_in_bytes_since_gc_ = 0;
  old_generation_allocation_in_bytes_since_gc_ = 0;
}

void GCTracer::AddContextDisposalTime(double time) {
  recorded_context_disposal_times_.Push(time);
}

void GCTracer::AddCompactionEvent(double duration,
                                  size_t live_bytes_compacted) {
  recorded_compactions_.Push(
      MakeBytesAndDuration(live_bytes_compacted, duration));
}

void GCTracer::AddSurvivalRatio(double promotion_ratio) {
  recorded_survival_ratios_.Push(promotion_ratio);
}

void GCTracer::AddIncrementalMarkingStep(double duration, size_t bytes) {
  if (bytes > 0) {
    incremental_marking_bytes_ += bytes;
    incremental_marking_duration_ += duration;
  }
}

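// Printf-style output: written to stdout when --trace-gc is enabled, and
// always appended to the heap's ring buffer, which is dumped on OOM failures.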
void GCTracer::Output(const char* format, ...) const {
  if (FLAG_trace_gc) {
    va_list arguments;
    va_start(arguments, format);
    base::OS::VPrint(format, arguments);
    va_end(arguments);
  }

  const int kBufferSize = 256;
  char raw_buffer[kBufferSize];
  Vector<char> buffer(raw_buffer, kBufferSize);
  va_list arguments2;
  va_start(arguments2, format);
  VSNPrintF(buffer, format, arguments2);
  va_end(arguments2);

  heap_->AddToRingBuffer(buffer.start());
}

void GCTracer::Print() const {
  double duration = current_.end_time - current_.start_time;
  const size_t kIncrementalStatsSize = 128;
  char incremental_buffer[kIncrementalStatsSize] = {0};

  if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) {
    base::OS::SNPrintF(
        incremental_buffer, kIncrementalStatsSize,
        " (+ %.1f ms in %d steps since start of marking, "
        "biggest step %.1f ms, walltime since start of marking %.f ms)",
        current_.scopes[Scope::MC_INCREMENTAL],
        current_.incremental_marking_scopes[Scope::MC_INCREMENTAL].steps,
        current_.incremental_marking_scopes[Scope::MC_INCREMENTAL].longest_step,
        current_.end_time - incremental_marking_start_time_);
  }

  // Avoid PrintF as Output also appends the string to the tracing ring buffer
  // that gets printed on OOM failures.
  Output(
      "[%d:%p] "
      "%8.0f ms: "
      "%s %.1f (%.1f) -> %.1f (%.1f) MB, "
      "%.1f / %.1f ms %s %s %s\n",
      base::OS::GetCurrentProcessId(),
      reinterpret_cast<void*>(heap_->isolate()),
      heap_->isolate()->time_millis_since_init(), current_.TypeName(false),
      static_cast<double>(current_.start_object_size) / MB,
      static_cast<double>(current_.start_memory_size) / MB,
      static_cast<double>(current_.end_object_size) / MB,
      static_cast<double>(current_.end_memory_size) / MB, duration,
      TotalExternalTime(), incremental_buffer,
      Heap::GarbageCollectionReasonToString(current_.gc_reason),
      current_.collector_reason != nullptr ? current_.collector_reason : "");
}

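// Prints the current event as name=value pairs, the machine-readable format
// selected by --trace-gc-nvp.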
void GCTracer::PrintNVP() const {
  double duration = current_.end_time - current_.start_time;
  double spent_in_mutator = current_.start_time - previous_.end_time;
  size_t allocated_since_last_gc =
      current_.start_object_size - previous_.end_object_size;

  double incremental_walltime_duration = 0;

  if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) {
    incremental_walltime_duration =
        current_.end_time - incremental_marking_start_time_;
  }

  switch (current_.type) {
    case Event::SCAVENGER:
      heap_->isolate()->PrintWithTimestamp(
          "pause=%.1f "
          "mutator=%.1f "
          "gc=%s "
          "reduce_memory=%d "
          "scavenge=%.2f "
          "old_new=%.2f "
          "weak=%.2f "
          "roots=%.2f "
          "code=%.2f "
          "semispace=%.2f "
          "external.prologue=%.2f "
          "external.epilogue=%.2f "
          "external_weak_global_handles=%.2f "
          "steps_count=%d "
          "steps_took=%.1f "
          "scavenge_throughput=%.f "
          "total_size_before=%" PRIuS
          " "
          "total_size_after=%" PRIuS
          " "
          "holes_size_before=%" PRIuS
          " "
          "holes_size_after=%" PRIuS
          " "
          "allocated=%" PRIuS
          " "
          "promoted=%" PRIuS
          " "
          "semi_space_copied=%" PRIuS
          " "
          "nodes_died_in_new=%d "
          "nodes_copied_in_new=%d "
          "nodes_promoted=%d "
          "promotion_ratio=%.1f%% "
          "average_survival_ratio=%.1f%% "
          "promotion_rate=%.1f%% "
          "semi_space_copy_rate=%.1f%% "
          "new_space_allocation_throughput=%.1f "
          "context_disposal_rate=%.1f\n",
          duration, spent_in_mutator, current_.TypeName(true),
          current_.reduce_memory, current_.scopes[Scope::SCAVENGER_SCAVENGE],
          current_.scopes[Scope::SCAVENGER_OLD_TO_NEW_POINTERS],
          current_.scopes[Scope::SCAVENGER_WEAK],
          current_.scopes[Scope::SCAVENGER_ROOTS],
          current_.scopes[Scope::SCAVENGER_CODE_FLUSH_CANDIDATES],
          current_.scopes[Scope::SCAVENGER_SEMISPACE],
          current_.scopes[Scope::EXTERNAL_PROLOGUE],
          current_.scopes[Scope::EXTERNAL_EPILOGUE],
          current_.scopes[Scope::EXTERNAL_WEAK_GLOBAL_HANDLES],
          current_.incremental_marking_scopes[GCTracer::Scope::MC_INCREMENTAL]
              .steps,
          current_.scopes[Scope::MC_INCREMENTAL],
          ScavengeSpeedInBytesPerMillisecond(), current_.start_object_size,
          current_.end_object_size, current_.start_holes_size,
          current_.end_holes_size, allocated_since_last_gc,
          heap_->promoted_objects_size(),
          heap_->semi_space_copied_object_size(),
          heap_->nodes_died_in_new_space_, heap_->nodes_copied_in_new_space_,
          heap_->nodes_promoted_, heap_->promotion_ratio_,
          AverageSurvivalRatio(), heap_->promotion_rate_,
          heap_->semi_space_copied_rate_,
          NewSpaceAllocationThroughputInBytesPerMillisecond(),
          ContextDisposalRateInMilliseconds());
      break;
    case Event::MINOR_MARK_COMPACTOR:
      heap_->isolate()->PrintWithTimestamp(
          "pause=%.1f "
          "mutator=%.1f "
          "gc=%s "
          "reduce_memory=%d\n",
          duration, spent_in_mutator, current_.TypeName(true),
          current_.reduce_memory);
      break;
    case Event::MARK_COMPACTOR:
    case Event::INCREMENTAL_MARK_COMPACTOR:
      heap_->isolate()->PrintWithTimestamp(
          "pause=%.1f "
          "mutator=%.1f "
          "gc=%s "
          "reduce_memory=%d "
          "clear=%.1f "
          "clear.code_flush=%.1f "
          "clear.dependent_code=%.1f "
          "clear.global_handles=%.1f "
          "clear.maps=%.1f "
          "clear.slots_buffer=%.1f "
          "clear.store_buffer=%.1f "
          "clear.string_table=%.1f "
          "clear.weak_cells=%.1f "
          "clear.weak_collections=%.1f "
          "clear.weak_lists=%.1f "
          "epilogue=%.1f "
          "evacuate=%.1f "
          "evacuate.candidates=%.1f "
          "evacuate.clean_up=%.1f "
          "evacuate.copy=%.1f "
          "evacuate.update_pointers=%.1f "
          "evacuate.update_pointers.to_evacuated=%.1f "
          "evacuate.update_pointers.to_new=%.1f "
          "evacuate.update_pointers.weak=%.1f "
          "external.prologue=%.1f "
          "external.epilogue=%.1f "
          "external.weak_global_handles=%.1f "
          "finish=%.1f "
          "mark=%.1f "
          "mark.finish_incremental=%.1f "
          "mark.object_grouping=%.1f "
          "mark.prepare_code_flush=%.1f "
          "mark.roots=%.1f "
          "mark.weak_closure=%.1f "
          "mark.weak_closure.ephemeral=%.1f "
          "mark.weak_closure.weak_handles=%.1f "
          "mark.weak_closure.weak_roots=%.1f "
          "mark.weak_closure.harmony=%.1f "
          "mark.wrapper_prologue=%.1f "
          "mark.wrapper_epilogue=%.1f "
          "mark.wrapper_tracing=%.1f "
          "prologue=%.1f "
          "sweep=%.1f "
          "sweep.code=%.1f "
          "sweep.map=%.1f "
          "sweep.old=%.1f "
          "incremental=%.1f "
          "incremental.finalize=%.1f "
          "incremental.finalize.body=%.1f "
          "incremental.finalize.external.prologue=%.1f "
          "incremental.finalize.external.epilogue=%.1f "
          "incremental.finalize.object_grouping=%.1f "
          "incremental.sweeping=%.1f "
          "incremental.wrapper_prologue=%.1f "
          "incremental.wrapper_tracing=%.1f "
          "incremental_wrapper_tracing_longest_step=%.1f "
          "incremental_finalize_longest_step=%.1f "
          "incremental_finalize_steps_count=%d "
          "incremental_longest_step=%.1f "
          "incremental_steps_count=%d "
          "incremental_marking_throughput=%.f "
          "incremental_walltime_duration=%.f "
          "total_size_before=%" PRIuS
          " "
          "total_size_after=%" PRIuS
          " "
          "holes_size_before=%" PRIuS
          " "
          "holes_size_after=%" PRIuS
          " "
          "allocated=%" PRIuS
          " "
          "promoted=%" PRIuS
          " "
          "semi_space_copied=%" PRIuS
          " "
          "nodes_died_in_new=%d "
          "nodes_copied_in_new=%d "
          "nodes_promoted=%d "
          "promotion_ratio=%.1f%% "
          "average_survival_ratio=%.1f%% "
          "promotion_rate=%.1f%% "
          "semi_space_copy_rate=%.1f%% "
          "new_space_allocation_throughput=%.1f "
          "context_disposal_rate=%.1f "
          "compaction_speed=%.f\n",
          duration, spent_in_mutator, current_.TypeName(true),
          current_.reduce_memory, current_.scopes[Scope::MC_CLEAR],
          current_.scopes[Scope::MC_CLEAR_CODE_FLUSH],
          current_.scopes[Scope::MC_CLEAR_DEPENDENT_CODE],
          current_.scopes[Scope::MC_CLEAR_GLOBAL_HANDLES],
          current_.scopes[Scope::MC_CLEAR_MAPS],
          current_.scopes[Scope::MC_CLEAR_SLOTS_BUFFER],
          current_.scopes[Scope::MC_CLEAR_STORE_BUFFER],
          current_.scopes[Scope::MC_CLEAR_STRING_TABLE],
          current_.scopes[Scope::MC_CLEAR_WEAK_CELLS],
          current_.scopes[Scope::MC_CLEAR_WEAK_COLLECTIONS],
          current_.scopes[Scope::MC_CLEAR_WEAK_LISTS],
          current_.scopes[Scope::MC_EPILOGUE],
          current_.scopes[Scope::MC_EVACUATE],
          current_.scopes[Scope::MC_EVACUATE_CANDIDATES],
          current_.scopes[Scope::MC_EVACUATE_CLEAN_UP],
          current_.scopes[Scope::MC_EVACUATE_COPY],
          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS],
          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_TO_EVACUATED],
          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_TO_NEW],
          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_WEAK],
          current_.scopes[Scope::EXTERNAL_PROLOGUE],
          current_.scopes[Scope::EXTERNAL_EPILOGUE],
          current_.scopes[Scope::EXTERNAL_WEAK_GLOBAL_HANDLES],
          current_.scopes[Scope::MC_FINISH], current_.scopes[Scope::MC_MARK],
          current_.scopes[Scope::MC_MARK_FINISH_INCREMENTAL],
          current_.scopes[Scope::MC_MARK_OBJECT_GROUPING],
          current_.scopes[Scope::MC_MARK_PREPARE_CODE_FLUSH],
          current_.scopes[Scope::MC_MARK_ROOTS],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_EPHEMERAL],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_WEAK_HANDLES],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_WEAK_ROOTS],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_HARMONY],
          current_.scopes[Scope::MC_MARK_WRAPPER_PROLOGUE],
          current_.scopes[Scope::MC_MARK_WRAPPER_EPILOGUE],
          current_.scopes[Scope::MC_MARK_WRAPPER_TRACING],
          current_.scopes[Scope::MC_PROLOGUE], current_.scopes[Scope::MC_SWEEP],
          current_.scopes[Scope::MC_SWEEP_CODE],
          current_.scopes[Scope::MC_SWEEP_MAP],
          current_.scopes[Scope::MC_SWEEP_OLD],
          current_.scopes[Scope::MC_INCREMENTAL],
          current_.scopes[Scope::MC_INCREMENTAL_FINALIZE],
          current_.scopes[Scope::MC_INCREMENTAL_FINALIZE_BODY],
          current_.scopes[Scope::MC_INCREMENTAL_EXTERNAL_PROLOGUE],
          current_.scopes[Scope::MC_INCREMENTAL_EXTERNAL_EPILOGUE],
          current_.scopes[Scope::MC_INCREMENTAL_FINALIZE_OBJECT_GROUPING],
          current_.scopes[Scope::MC_INCREMENTAL_SWEEPING],
          current_.scopes[Scope::MC_INCREMENTAL_WRAPPER_PROLOGUE],
          current_.scopes[Scope::MC_INCREMENTAL_WRAPPER_TRACING],
          current_
              .incremental_marking_scopes[Scope::MC_INCREMENTAL_WRAPPER_TRACING]
              .longest_step,
          current_
              .incremental_marking_scopes[Scope::MC_INCREMENTAL_FINALIZE_BODY]
              .longest_step,
          current_
              .incremental_marking_scopes[Scope::MC_INCREMENTAL_FINALIZE_BODY]
              .steps,
          current_.incremental_marking_scopes[Scope::MC_INCREMENTAL]
              .longest_step,
          current_.incremental_marking_scopes[Scope::MC_INCREMENTAL].steps,
          IncrementalMarkingSpeedInBytesPerMillisecond(),
          incremental_walltime_duration, current_.start_object_size,
          current_.end_object_size, current_.start_holes_size,
          current_.end_holes_size, allocated_since_last_gc,
          heap_->promoted_objects_size(),
          heap_->semi_space_copied_object_size(),
          heap_->nodes_died_in_new_space_, heap_->nodes_copied_in_new_space_,
          heap_->nodes_promoted_, heap_->promotion_ratio_,
          AverageSurvivalRatio(), heap_->promotion_rate_,
          heap_->semi_space_copied_rate_,
          NewSpaceAllocationThroughputInBytesPerMillisecond(),
          ContextDisposalRateInMilliseconds(),
          CompactionSpeedInBytesPerMillisecond());
      break;
    case Event::START:
      break;
    default:
      UNREACHABLE();
  }
}

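// Returns the average speed in bytes/ms over the buffer entries (plus the
// initial value). When time_ms != 0, only the most recent time_ms of history
// is considered, since the buffer is folded starting from the newest entry.
// The result is clamped to [1, 1024 * MB] bytes/ms.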
double GCTracer::AverageSpeed(const base::RingBuffer<BytesAndDuration>& buffer,
                              const BytesAndDuration& initial, double time_ms) {
  BytesAndDuration sum = buffer.Sum(
      [time_ms](BytesAndDuration a, BytesAndDuration b) {
        if (time_ms != 0 && a.second >= time_ms) return a;
        return std::make_pair(a.first + b.first, a.second + b.second);
      },
      initial);
  uint64_t bytes = sum.first;
  double durations = sum.second;
  if (durations == 0.0) return 0;
  double speed = bytes / durations;
  const int max_speed = 1024 * MB;
  const int min_speed = 1;
  if (speed >= max_speed) return max_speed;
  if (speed <= min_speed) return min_speed;
  return speed;
}

double GCTracer::AverageSpeed(
    const base::RingBuffer<BytesAndDuration>& buffer) {
  return AverageSpeed(buffer, MakeBytesAndDuration(0, 0), 0);
}

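// Records the speed of a finished incremental marking cycle by averaging it
// with the previously recorded speed, i.e. an exponential moving average with
// weight 1/2.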
void GCTracer::RecordIncrementalMarkingSpeed(size_t bytes, double duration) {
  if (duration == 0 || bytes == 0) return;
  double current_speed = bytes / duration;
  if (recorded_incremental_marking_speed_ == 0) {
    recorded_incremental_marking_speed_ = current_speed;
  } else {
    recorded_incremental_marking_speed_ =
        (recorded_incremental_marking_speed_ + current_speed) / 2;
  }
}

double GCTracer::IncrementalMarkingSpeedInBytesPerMillisecond() const {
  const int kConservativeSpeedInBytesPerMillisecond = 128 * KB;
  if (recorded_incremental_marking_speed_ != 0) {
    return recorded_incremental_marking_speed_;
  }
  if (incremental_marking_duration_ != 0.0) {
    return incremental_marking_bytes_ / incremental_marking_duration_;
  }
  return kConservativeSpeedInBytesPerMillisecond;
}

double GCTracer::ScavengeSpeedInBytesPerMillisecond(
    ScavengeSpeedMode mode) const {
  if (mode == kForAllObjects) {
    return AverageSpeed(recorded_minor_gcs_total_);
  } else {
    return AverageSpeed(recorded_minor_gcs_survived_);
  }
}

double GCTracer::CompactionSpeedInBytesPerMillisecond() const {
  return AverageSpeed(recorded_compactions_);
}

double GCTracer::MarkCompactSpeedInBytesPerMillisecond() const {
  return AverageSpeed(recorded_mark_compacts_);
}

double GCTracer::FinalIncrementalMarkCompactSpeedInBytesPerMillisecond() const {
  return AverageSpeed(recorded_incremental_mark_compacts_);
}

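// Estimates the overall speed of a mark-compact cycle whose marking is done
// incrementally and finished by a final atomic step. Worked example: marking
// at 300 bytes/ms followed by a final step at 600 bytes/ms combines to
// 300 * 600 / (300 + 600) = 200 bytes/ms.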
double GCTracer::CombinedMarkCompactSpeedInBytesPerMillisecond() {
  if (combined_mark_compact_speed_cache_ > 0)
    return combined_mark_compact_speed_cache_;
  const double kMinimumMarkingSpeed = 0.5;
  double speed1 = IncrementalMarkingSpeedInBytesPerMillisecond();
  double speed2 = FinalIncrementalMarkCompactSpeedInBytesPerMillisecond();
  if (speed1 < kMinimumMarkingSpeed || speed2 < kMinimumMarkingSpeed) {
    // No data for the incremental marking speed.
    // Return the non-incremental mark-compact speed.
    combined_mark_compact_speed_cache_ =
        MarkCompactSpeedInBytesPerMillisecond();
  } else {
    // Combine the speed of incremental step and the speed of the final step.
    // 1 / (1 / speed1 + 1 / speed2) = speed1 * speed2 / (speed1 + speed2).
    combined_mark_compact_speed_cache_ = speed1 * speed2 / (speed1 + speed2);
  }
  return combined_mark_compact_speed_cache_;
}

double GCTracer::NewSpaceAllocationThroughputInBytesPerMillisecond(
    double time_ms) const {
  size_t bytes = new_space_allocation_in_bytes_since_gc_;
  double durations = allocation_duration_since_gc_;
  return AverageSpeed(recorded_new_generation_allocations_,
                      MakeBytesAndDuration(bytes, durations), time_ms);
}

double GCTracer::OldGenerationAllocationThroughputInBytesPerMillisecond(
    double time_ms) const {
  size_t bytes = old_generation_allocation_in_bytes_since_gc_;
  double durations = allocation_duration_since_gc_;
  return AverageSpeed(recorded_old_generation_allocations_,
                      MakeBytesAndDuration(bytes, durations), time_ms);
}

double GCTracer::AllocationThroughputInBytesPerMillisecond(
    double time_ms) const {
  return NewSpaceAllocationThroughputInBytesPerMillisecond(time_ms) +
         OldGenerationAllocationThroughputInBytesPerMillisecond(time_ms);
}

double GCTracer::CurrentAllocationThroughputInBytesPerMillisecond() const {
  return AllocationThroughputInBytesPerMillisecond(kThroughputTimeFrameMs);
}

double GCTracer::CurrentOldGenerationAllocationThroughputInBytesPerMillisecond()
    const {
  return OldGenerationAllocationThroughputInBytesPerMillisecond(
      kThroughputTimeFrameMs);
}

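// Average interval between recorded context disposals: the wall time between
// the oldest recorded disposal and now, divided by the number of entries.
// Returns 0 until the ring buffer has filled up.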
double GCTracer::ContextDisposalRateInMilliseconds() const {
  if (recorded_context_disposal_times_.Count() <
      recorded_context_disposal_times_.kSize)
    return 0.0;
  double begin = heap_->MonotonicallyIncreasingTimeInMs();
  double end = recorded_context_disposal_times_.Sum(
      [](double a, double b) { return b; }, 0.0);
  return (begin - end) / recorded_context_disposal_times_.Count();
}

double GCTracer::AverageSurvivalRatio() const {
  if (recorded_survival_ratios_.Count() == 0) return 0.0;
  double sum = recorded_survival_ratios_.Sum(
      [](double a, double b) { return a + b; }, 0.0);
  return sum / recorded_survival_ratios_.Count();
}

bool GCTracer::SurvivalEventsRecorded() const {
  return recorded_survival_ratios_.Count() > 0;
}

void GCTracer::ResetSurvivalEvents() { recorded_survival_ratios_.Reset(); }

void GCTracer::NotifyIncrementalMarkingStart() {
  incremental_marking_start_time_ = heap_->MonotonicallyIncreasingTimeInMs();
}

}  // namespace internal
}  // namespace v8