// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/memory-reducer.h"

#include "src/flags.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/heap-inl.h"
#include "src/utils.h"
#include "src/v8.h"

namespace v8 {
namespace internal {

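// Delays (in ms) and the maximum number of GCs used by the memory reducer
// state machine; see the comment for the MemoryReducer class for details.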
const int MemoryReducer::kLongDelayMs = 8000;
const int MemoryReducer::kShortDelayMs = 500;
const int MemoryReducer::kWatchdogDelayMs = 100000;
const int MemoryReducer::kMaxNumberOfGCs = 3;


MemoryReducer::TimerTask::TimerTask(MemoryReducer* memory_reducer)
    : CancelableTask(memory_reducer->heap()->isolate()),
      memory_reducer_(memory_reducer) {}


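// Samples the allocation rate and the rate of JS calls from the API, decides
// whether the mutator appears idle or should optimize for memory usage, and
// forwards the resulting kTimer event to the memory reducer.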
void MemoryReducer::TimerTask::RunInternal() {
  const double kJsCallsPerMsThreshold = 0.5;
  Heap* heap = memory_reducer_->heap();
  Event event;
  double time_ms = heap->MonotonicallyIncreasingTimeInMs();
  heap->tracer()->SampleAllocation(time_ms, heap->NewSpaceAllocationCounter(),
                                   heap->OldGenerationAllocationCounter());
  double js_call_rate = memory_reducer_->SampleAndGetJsCallsPerMs(time_ms);
  bool low_allocation_rate = heap->HasLowAllocationRate();
  bool is_idle = js_call_rate < kJsCallsPerMsThreshold && low_allocation_rate;
  bool optimize_for_memory = heap->ShouldOptimizeForMemoryUsage();
  if (FLAG_trace_gc_verbose) {
    PrintIsolate(heap->isolate(), "Memory reducer: call rate %.3lf, %s, %s\n",
                 js_call_rate, low_allocation_rate ? "low alloc" : "high alloc",
                 optimize_for_memory ? "background" : "foreground");
  }
  event.type = kTimer;
  event.time_ms = time_ms;
  // The memory reducer will start incremental marking if
  // 1) the mutator is likely idle: JS call rate and allocation rate are low.
  // 2) the mutator is in the background: the optimize-for-memory flag is set.
  event.should_start_incremental_gc = is_idle || optimize_for_memory;
  event.can_start_incremental_gc =
      heap->incremental_marking()->IsStopped() &&
      heap->incremental_marking()->CanBeActivated();
  memory_reducer_->NotifyTimer(event);
}


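// Returns the average number of JS calls from the API per ms since the last
// sample, and updates the stored call counter and sample time.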
double MemoryReducer::SampleAndGetJsCallsPerMs(double time_ms) {
  unsigned int counter = heap()->isolate()->js_calls_from_api_counter();
  unsigned int call_delta = counter - js_calls_counter_;
  double time_delta_ms = time_ms - js_calls_sample_time_ms_;
  js_calls_counter_ = counter;
  js_calls_sample_time_ms_ = time_ms;
  return time_delta_ms > 0 ? call_delta / time_delta_ms : 0;
}


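// Handles a timer event: advances the state machine and, if the new state is
// kRun, starts idle incremental marking. If the state stays kWait, this makes
// progress with pending incremental marking (when memory usage has priority
// over latency) and re-schedules the timer.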
void MemoryReducer::NotifyTimer(const Event& event) {
  DCHECK_EQ(kTimer, event.type);
  DCHECK_EQ(kWait, state_.action);
  state_ = Step(state_, event);
  if (state_.action == kRun) {
    DCHECK(heap()->incremental_marking()->IsStopped());
    DCHECK(FLAG_incremental_marking);
    if (FLAG_trace_gc_verbose) {
      PrintIsolate(heap()->isolate(), "Memory reducer: started GC #%d\n",
                   state_.started_gcs);
    }
    heap()->StartIdleIncrementalMarking();
  } else if (state_.action == kWait) {
    if (!heap()->incremental_marking()->IsStopped() &&
        heap()->ShouldOptimizeForMemoryUsage()) {
      // Make progress with pending incremental marking if memory usage has
      // higher priority than latency. This is important for background tabs
      // that do not send idle notifications.
      const int kIncrementalMarkingDelayMs = 500;
      double deadline = heap()->MonotonicallyIncreasingTimeInMs() +
                        kIncrementalMarkingDelayMs;
      heap()->incremental_marking()->AdvanceIncrementalMarking(
          0, deadline, i::IncrementalMarking::StepActions(
                           i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                           i::IncrementalMarking::FORCE_MARKING,
                           i::IncrementalMarking::FORCE_COMPLETION));
      heap()->FinalizeIncrementalMarkingIfComplete(
          "Memory reducer: finalize incremental marking");
    }
    // Re-schedule the timer.
    ScheduleTimer(event.time_ms, state_.next_gc_start_ms - event.time_ms);
    if (FLAG_trace_gc_verbose) {
      PrintIsolate(heap()->isolate(), "Memory reducer: waiting for %.f ms\n",
                   state_.next_gc_start_ms - event.time_ms);
    }
  }
}


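// Handles a mark-compact event: advances the state machine, starts the timer
// when transitioning into the kWait state, and reports the finished GC if one
// was running.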
void MemoryReducer::NotifyMarkCompact(const Event& event) {
  DCHECK_EQ(kMarkCompact, event.type);
  Action old_action = state_.action;
  state_ = Step(state_, event);
  if (old_action != kWait && state_.action == kWait) {
    // If we are transitioning to the WAIT state, start the timer.
    ScheduleTimer(event.time_ms, state_.next_gc_start_ms - event.time_ms);
  }
  if (old_action == kRun) {
    if (FLAG_trace_gc_verbose) {
      PrintIsolate(heap()->isolate(), "Memory reducer: finished GC #%d (%s)\n",
                   state_.started_gcs,
                   state_.action == kWait ? "will do more" : "done");
    }
  }
}


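// Handles a context-disposed event: advances the state machine and starts the
// timer when transitioning into the kWait state.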
void MemoryReducer::NotifyContextDisposed(const Event& event) {
  DCHECK_EQ(kContextDisposed, event.type);
  Action old_action = state_.action;
  state_ = Step(state_, event);
  if (old_action != kWait && state_.action == kWait) {
    // If we are transitioning to the WAIT state, start the timer.
    ScheduleTimer(event.time_ms, state_.next_gc_start_ms - event.time_ms);
  }
}


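// Returns true if the last GC happened more than kWatchdogDelayMs ago, which
// allows starting a GC even if the mutator does not appear idle.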
bool MemoryReducer::WatchdogGC(const State& state, const Event& event) {
  return state.last_gc_time_ms != 0 &&
         event.time_ms > state.last_gc_time_ms + kWatchdogDelayMs;
}


// For the specification of this function see the comment for the
// MemoryReducer class.
MemoryReducer::State MemoryReducer::Step(const State& state,
                                         const Event& event) {
  if (!FLAG_incremental_marking || !FLAG_memory_reducer) {
    return State(kDone, 0, 0, state.last_gc_time_ms);
  }
  switch (state.action) {
    case kDone:
      if (event.type == kTimer) {
        return state;
      } else {
        DCHECK(event.type == kContextDisposed || event.type == kMarkCompact);
        return State(
            kWait, 0, event.time_ms + kLongDelayMs,
            event.type == kMarkCompact ? event.time_ms : state.last_gc_time_ms);
      }
    case kWait:
      switch (event.type) {
        case kContextDisposed:
          return state;
        case kTimer:
          if (state.started_gcs >= kMaxNumberOfGCs) {
            return State(kDone, kMaxNumberOfGCs, 0.0, state.last_gc_time_ms);
          } else if (event.can_start_incremental_gc &&
                     (event.should_start_incremental_gc ||
                      WatchdogGC(state, event))) {
            if (state.next_gc_start_ms <= event.time_ms) {
              return State(kRun, state.started_gcs + 1, 0.0,
                           state.last_gc_time_ms);
            } else {
              return state;
            }
          } else {
            return State(kWait, state.started_gcs, event.time_ms + kLongDelayMs,
                         state.last_gc_time_ms);
          }
        case kMarkCompact:
          return State(kWait, state.started_gcs, event.time_ms + kLongDelayMs,
                       event.time_ms);
      }
    case kRun:
      if (event.type != kMarkCompact) {
        return state;
      } else {
        if (state.started_gcs < kMaxNumberOfGCs &&
            (event.next_gc_likely_to_collect_more || state.started_gcs == 1)) {
          return State(kWait, state.started_gcs, event.time_ms + kShortDelayMs,
                       event.time_ms);
        } else {
          return State(kDone, kMaxNumberOfGCs, 0.0, event.time_ms);
        }
      }
  }
  UNREACHABLE();
  return State(kDone, 0, 0, 0.0);  // Make the compiler happy.
}


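// Posts a delayed foreground task that will deliver the next timer event
// roughly delay_ms (plus some slack) from now, and records the current JS
// call counter and time as the new sample point.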
void MemoryReducer::ScheduleTimer(double time_ms, double delay_ms) {
  DCHECK(delay_ms > 0);
  // Record the time and the JS call counter.
  SampleAndGetJsCallsPerMs(time_ms);
  // Leave some room for precision error in the task scheduler.
  const double kSlackMs = 100;
  v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(heap()->isolate());
  auto timer_task = new MemoryReducer::TimerTask(this);
  V8::GetCurrentPlatform()->CallDelayedOnForegroundThread(
      isolate, timer_task, (delay_ms + kSlackMs) / 1000.0);
}


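// Resets the state machine to the kDone state.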
void MemoryReducer::TearDown() { state_ = State(kDone, 0, 0, 0.0); }

}  // namespace internal
}  // namespace v8