// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_HEAP_H_
#define V8_HEAP_HEAP_H_

#include <cmath>
#include <map>

// Clients of this interface shouldn't depend on lots of heap internals.
// Do not include anything from src/heap here!
#include "include/v8.h"
#include "src/allocation.h"
#include "src/assert-scope.h"
#include "src/base/atomic-utils.h"
#include "src/globals.h"
#include "src/heap-symbols.h"
#include "src/list.h"
#include "src/objects.h"

namespace v8 {
namespace internal {

using v8::MemoryPressureLevel;

// Defines all the roots in Heap.
#define STRONG_ROOT_LIST(V)                                                    \
  /* Cluster the most popular ones in a few cache lines here at the top.    */ \
  /* The first 32 entries are most often used in the startup snapshot and   */ \
  /* can use a shorter representation in the serialization format.          */ \
  V(Map, free_space_map, FreeSpaceMap)                                         \
  V(Map, one_pointer_filler_map, OnePointerFillerMap)                          \
  V(Map, two_pointer_filler_map, TwoPointerFillerMap)                          \
  V(Oddball, uninitialized_value, UninitializedValue)                          \
  V(Oddball, undefined_value, UndefinedValue)                                  \
  V(Oddball, the_hole_value, TheHoleValue)                                     \
  V(Oddball, null_value, NullValue)                                            \
  V(Oddball, true_value, TrueValue)                                            \
  V(Oddball, false_value, FalseValue)                                          \
  V(String, empty_string, empty_string)                                        \
  V(Map, meta_map, MetaMap)                                                    \
  V(Map, byte_array_map, ByteArrayMap)                                         \
  V(Map, fixed_array_map, FixedArrayMap)                                       \
  V(Map, fixed_cow_array_map, FixedCOWArrayMap)                                \
  V(Map, hash_table_map, HashTableMap)                                         \
  V(Map, symbol_map, SymbolMap)                                                \
  V(Map, one_byte_string_map, OneByteStringMap)                                \
  V(Map, one_byte_internalized_string_map, OneByteInternalizedStringMap)       \
  V(Map, scope_info_map, ScopeInfoMap)                                         \
  V(Map, shared_function_info_map, SharedFunctionInfoMap)                      \
  V(Map, code_map, CodeMap)                                                    \
  V(Map, function_context_map, FunctionContextMap)                             \
  V(Map, cell_map, CellMap)                                                    \
  V(Map, weak_cell_map, WeakCellMap)                                           \
  V(Map, global_property_cell_map, GlobalPropertyCellMap)                      \
  V(Map, foreign_map, ForeignMap)                                              \
  V(Map, heap_number_map, HeapNumberMap)                                       \
  V(Map, transition_array_map, TransitionArrayMap)                             \
  V(FixedArray, empty_literals_array, EmptyLiteralsArray)                      \
  V(FixedArray, empty_type_feedback_vector, EmptyTypeFeedbackVector)           \
  V(FixedArray, empty_fixed_array, EmptyFixedArray)                            \
  V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray)             \
  /* Entries beyond the first 32                                            */ \
  /* The roots above this line should be boring from a GC point of view.    */ \
  /* This means they are never in new space and never on a page that is     */ \
  /* being compacted.                                                       */ \
  /* Empty scope info */                                                       \
  V(ScopeInfo, empty_scope_info, EmptyScopeInfo)                               \
  /* Oddballs */                                                               \
  V(Oddball, no_interceptor_result_sentinel, NoInterceptorResultSentinel)      \
  V(Oddball, arguments_marker, ArgumentsMarker)                                \
  V(Oddball, exception, Exception)                                             \
  V(Oddball, termination_exception, TerminationException)                      \
  V(Oddball, optimized_out, OptimizedOut)                                      \
  V(Oddball, stale_register, StaleRegister)                                    \
  /* Context maps */                                                           \
  V(Map, native_context_map, NativeContextMap)                                 \
  V(Map, module_context_map, ModuleContextMap)                                 \
  V(Map, script_context_map, ScriptContextMap)                                 \
  V(Map, block_context_map, BlockContextMap)                                   \
  V(Map, catch_context_map, CatchContextMap)                                   \
  V(Map, with_context_map, WithContextMap)                                     \
  V(Map, debug_evaluate_context_map, DebugEvaluateContextMap)                  \
  V(Map, script_context_table_map, ScriptContextTableMap)                      \
  /* Maps */                                                                   \
  V(Map, fixed_double_array_map, FixedDoubleArrayMap)                          \
  V(Map, mutable_heap_number_map, MutableHeapNumberMap)                        \
  V(Map, ordered_hash_table_map, OrderedHashTableMap)                          \
  V(Map, unseeded_number_dictionary_map, UnseededNumberDictionaryMap)          \
  V(Map, sloppy_arguments_elements_map, SloppyArgumentsElementsMap)            \
  V(Map, message_object_map, JSMessageObjectMap)                               \
  V(Map, external_map, ExternalMap)                                            \
  V(Map, bytecode_array_map, BytecodeArrayMap)                                 \
  V(Map, module_info_map, ModuleInfoMap)                                       \
  /* String maps */                                                            \
  V(Map, native_source_string_map, NativeSourceStringMap)                      \
  V(Map, string_map, StringMap)                                                \
  V(Map, cons_one_byte_string_map, ConsOneByteStringMap)                       \
  V(Map, cons_string_map, ConsStringMap)                                       \
  V(Map, sliced_string_map, SlicedStringMap)                                   \
  V(Map, sliced_one_byte_string_map, SlicedOneByteStringMap)                   \
  V(Map, external_string_map, ExternalStringMap)                               \
  V(Map, external_string_with_one_byte_data_map,                               \
    ExternalStringWithOneByteDataMap)                                          \
  V(Map, external_one_byte_string_map, ExternalOneByteStringMap)               \
  V(Map, short_external_string_map, ShortExternalStringMap)                    \
  V(Map, short_external_string_with_one_byte_data_map,                         \
    ShortExternalStringWithOneByteDataMap)                                     \
  V(Map, internalized_string_map, InternalizedStringMap)                       \
  V(Map, external_internalized_string_map, ExternalInternalizedStringMap)      \
  V(Map, external_internalized_string_with_one_byte_data_map,                  \
    ExternalInternalizedStringWithOneByteDataMap)                              \
  V(Map, external_one_byte_internalized_string_map,                            \
    ExternalOneByteInternalizedStringMap)                                      \
  V(Map, short_external_internalized_string_map,                               \
    ShortExternalInternalizedStringMap)                                        \
  V(Map, short_external_internalized_string_with_one_byte_data_map,            \
    ShortExternalInternalizedStringWithOneByteDataMap)                         \
  V(Map, short_external_one_byte_internalized_string_map,                      \
    ShortExternalOneByteInternalizedStringMap)                                 \
  V(Map, short_external_one_byte_string_map, ShortExternalOneByteStringMap)    \
  /* Array element maps */                                                     \
  V(Map, fixed_uint8_array_map, FixedUint8ArrayMap)                            \
  V(Map, fixed_int8_array_map, FixedInt8ArrayMap)                              \
  V(Map, fixed_uint16_array_map, FixedUint16ArrayMap)                          \
  V(Map, fixed_int16_array_map, FixedInt16ArrayMap)                            \
  V(Map, fixed_uint32_array_map, FixedUint32ArrayMap)                          \
  V(Map, fixed_int32_array_map, FixedInt32ArrayMap)                            \
  V(Map, fixed_float32_array_map, FixedFloat32ArrayMap)                        \
  V(Map, fixed_float64_array_map, FixedFloat64ArrayMap)                        \
  V(Map, fixed_uint8_clamped_array_map, FixedUint8ClampedArrayMap)             \
  V(Map, float32x4_map, Float32x4Map)                                          \
  V(Map, int32x4_map, Int32x4Map)                                              \
  V(Map, uint32x4_map, Uint32x4Map)                                            \
  V(Map, bool32x4_map, Bool32x4Map)                                            \
  V(Map, int16x8_map, Int16x8Map)                                              \
  V(Map, uint16x8_map, Uint16x8Map)                                            \
  V(Map, bool16x8_map, Bool16x8Map)                                            \
  V(Map, int8x16_map, Int8x16Map)                                              \
  V(Map, uint8x16_map, Uint8x16Map)                                            \
  V(Map, bool8x16_map, Bool8x16Map)                                            \
  /* Canonical empty values */                                                 \
  V(ByteArray, empty_byte_array, EmptyByteArray)                               \
  V(FixedTypedArrayBase, empty_fixed_uint8_array, EmptyFixedUint8Array)        \
  V(FixedTypedArrayBase, empty_fixed_int8_array, EmptyFixedInt8Array)          \
  V(FixedTypedArrayBase, empty_fixed_uint16_array, EmptyFixedUint16Array)      \
  V(FixedTypedArrayBase, empty_fixed_int16_array, EmptyFixedInt16Array)        \
  V(FixedTypedArrayBase, empty_fixed_uint32_array, EmptyFixedUint32Array)      \
  V(FixedTypedArrayBase, empty_fixed_int32_array, EmptyFixedInt32Array)        \
  V(FixedTypedArrayBase, empty_fixed_float32_array, EmptyFixedFloat32Array)    \
  V(FixedTypedArrayBase, empty_fixed_float64_array, EmptyFixedFloat64Array)    \
  V(FixedTypedArrayBase, empty_fixed_uint8_clamped_array,                      \
    EmptyFixedUint8ClampedArray)                                               \
  V(Script, empty_script, EmptyScript)                                         \
  V(Cell, undefined_cell, UndefinedCell)                                       \
  V(FixedArray, empty_sloppy_arguments_elements, EmptySloppyArgumentsElements) \
  V(SeededNumberDictionary, empty_slow_element_dictionary,                     \
    EmptySlowElementDictionary)                                                \
  V(TypeFeedbackVector, dummy_vector, DummyVector)                             \
  V(PropertyCell, empty_property_cell, EmptyPropertyCell)                      \
  V(WeakCell, empty_weak_cell, EmptyWeakCell)                                  \
  /* Protectors */                                                             \
  V(PropertyCell, array_protector, ArrayProtector)                             \
  V(Cell, is_concat_spreadable_protector, IsConcatSpreadableProtector)         \
  V(PropertyCell, has_instance_protector, HasInstanceProtector)                \
  V(Cell, species_protector, SpeciesProtector)                                 \
  V(PropertyCell, string_length_protector, StringLengthProtector)              \
  V(Cell, fast_array_iteration_protector, FastArrayIterationProtector)         \
  V(Cell, array_iterator_protector, ArrayIteratorProtector)                    \
  /* Special numbers */                                                        \
  V(HeapNumber, nan_value, NanValue)                                           \
  V(HeapNumber, hole_nan_value, HoleNanValue)                                  \
  V(HeapNumber, infinity_value, InfinityValue)                                 \
  V(HeapNumber, minus_zero_value, MinusZeroValue)                              \
  V(HeapNumber, minus_infinity_value, MinusInfinityValue)                      \
  /* Caches */                                                                 \
  V(FixedArray, number_string_cache, NumberStringCache)                        \
  V(FixedArray, single_character_string_cache, SingleCharacterStringCache)     \
  V(FixedArray, string_split_cache, StringSplitCache)                          \
  V(FixedArray, regexp_multiple_cache, RegExpMultipleCache)                    \
  V(Object, instanceof_cache_function, InstanceofCacheFunction)                \
  V(Object, instanceof_cache_map, InstanceofCacheMap)                          \
  V(Object, instanceof_cache_answer, InstanceofCacheAnswer)                    \
  V(FixedArray, natives_source_cache, NativesSourceCache)                      \
  V(FixedArray, experimental_natives_source_cache,                             \
    ExperimentalNativesSourceCache)                                            \
  V(FixedArray, extra_natives_source_cache, ExtraNativesSourceCache)           \
  V(FixedArray, experimental_extra_natives_source_cache,                       \
    ExperimentalExtraNativesSourceCache)                                       \
  /* Lists and dictionaries */                                                 \
  V(NameDictionary, empty_properties_dictionary, EmptyPropertiesDictionary)    \
  V(Object, symbol_registry, SymbolRegistry)                                   \
  V(Object, script_list, ScriptList)                                           \
  V(UnseededNumberDictionary, code_stubs, CodeStubs)                           \
  V(FixedArray, materialized_objects, MaterializedObjects)                     \
  V(FixedArray, microtask_queue, MicrotaskQueue)                               \
  V(FixedArray, detached_contexts, DetachedContexts)                           \
  V(ArrayList, retained_maps, RetainedMaps)                                    \
  V(WeakHashTable, weak_object_to_code_table, WeakObjectToCodeTable)           \
  /* weak_new_space_object_to_code_list is an array of weak cells, where */    \
  /* slots with even indices refer to the weak object, and the subsequent */   \
  /* slots refer to the code with the reference to the weak object. */         \
  V(ArrayList, weak_new_space_object_to_code_list,                             \
    WeakNewSpaceObjectToCodeList)                                              \
  V(Object, weak_stack_trace_list, WeakStackTraceList)                         \
  V(Object, noscript_shared_function_infos, NoScriptSharedFunctionInfos)       \
  V(FixedArray, serialized_templates, SerializedTemplates)                     \
  /* Configured values */                                                      \
  V(TemplateList, message_listeners, MessageListeners)                         \
  V(Code, js_entry_code, JsEntryCode)                                          \
  V(Code, js_construct_entry_code, JsConstructEntryCode)                       \
  /* Oddball maps */                                                           \
  V(Map, undefined_map, UndefinedMap)                                          \
  V(Map, the_hole_map, TheHoleMap)                                             \
  V(Map, null_map, NullMap)                                                    \
  V(Map, boolean_map, BooleanMap)                                              \
  V(Map, uninitialized_map, UninitializedMap)                                  \
  V(Map, arguments_marker_map, ArgumentsMarkerMap)                             \
  V(Map, no_interceptor_result_sentinel_map, NoInterceptorResultSentinelMap)   \
  V(Map, exception_map, ExceptionMap)                                          \
  V(Map, termination_exception_map, TerminationExceptionMap)                   \
  V(Map, optimized_out_map, OptimizedOutMap)                                   \
  V(Map, stale_register_map, StaleRegisterMap)

// Entries in this list are limited to Smis and are not visited during GC.
#define SMI_ROOT_LIST(V)                                                       \
  V(Smi, stack_limit, StackLimit)                                              \
  V(Smi, real_stack_limit, RealStackLimit)                                     \
  V(Smi, last_script_id, LastScriptId)                                         \
  V(Smi, hash_seed, HashSeed)                                                  \
  /* To distinguish the function templates, so that we can find them in the */ \
  /* function cache of the native context. */                                  \
  V(Smi, next_template_serial_number, NextTemplateSerialNumber)                \
  V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset)     \
  V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset)           \
  V(Smi, getter_stub_deopt_pc_offset, GetterStubDeoptPCOffset)                 \
  V(Smi, setter_stub_deopt_pc_offset, SetterStubDeoptPCOffset)                 \
  V(Smi, interpreter_entry_return_pc_offset, InterpreterEntryReturnPCOffset)

#define ROOT_LIST(V)  \
  STRONG_ROOT_LIST(V) \
  SMI_ROOT_LIST(V)    \
  V(StringTable, string_table, StringTable)
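
// Illustrative sketch of how clients consume these X-macro lists (COUNT_ROOT
// is a hypothetical macro, not part of this header): define a V-shaped macro,
// apply the list, then undefine it.
//
//   #define COUNT_ROOT(type, name, camel_name) +1
//   static const int kNumberOfStrongRoots = 0 STRONG_ROOT_LIST(COUNT_ROOT);
//   #undef COUNT_ROOT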


// Heap roots that are known to be immortal immovable, for which we can safely
// skip write barriers. This list is not complete.
#define IMMORTAL_IMMOVABLE_ROOT_LIST(V) \
  V(ByteArrayMap)                       \
  V(BytecodeArrayMap)                   \
  V(FreeSpaceMap)                       \
  V(OnePointerFillerMap)                \
  V(TwoPointerFillerMap)                \
  V(UndefinedValue)                     \
  V(TheHoleValue)                       \
  V(NullValue)                          \
  V(TrueValue)                          \
  V(FalseValue)                         \
  V(UninitializedValue)                 \
  V(CellMap)                            \
  V(GlobalPropertyCellMap)              \
  V(SharedFunctionInfoMap)              \
  V(MetaMap)                            \
  V(HeapNumberMap)                      \
  V(MutableHeapNumberMap)               \
  V(Float32x4Map)                       \
  V(Int32x4Map)                         \
  V(Uint32x4Map)                        \
  V(Bool32x4Map)                        \
  V(Int16x8Map)                         \
  V(Uint16x8Map)                        \
  V(Bool16x8Map)                        \
  V(Int8x16Map)                         \
  V(Uint8x16Map)                        \
  V(Bool8x16Map)                        \
  V(NativeContextMap)                   \
  V(FixedArrayMap)                      \
  V(CodeMap)                            \
  V(ScopeInfoMap)                       \
  V(ModuleInfoMap)                      \
  V(FixedCOWArrayMap)                   \
  V(FixedDoubleArrayMap)                \
  V(WeakCellMap)                        \
  V(TransitionArrayMap)                 \
  V(NoInterceptorResultSentinel)        \
  V(HashTableMap)                       \
  V(OrderedHashTableMap)                \
  V(EmptyFixedArray)                    \
  V(EmptyByteArray)                     \
  V(EmptyDescriptorArray)               \
  V(ArgumentsMarker)                    \
  V(SymbolMap)                          \
  V(SloppyArgumentsElementsMap)         \
  V(FunctionContextMap)                 \
  V(CatchContextMap)                    \
  V(WithContextMap)                     \
  V(BlockContextMap)                    \
  V(ModuleContextMap)                   \
  V(ScriptContextMap)                   \
  V(UndefinedMap)                       \
  V(TheHoleMap)                         \
  V(NullMap)                            \
  V(BooleanMap)                         \
  V(UninitializedMap)                   \
  V(ArgumentsMarkerMap)                 \
  V(JSMessageObjectMap)                 \
  V(ForeignMap)                         \
  V(NanValue)                           \
  V(InfinityValue)                      \
  V(MinusZeroValue)                     \
  V(MinusInfinityValue)                 \
  V(EmptyWeakCell)                      \
  V(empty_string)                       \
  PRIVATE_SYMBOL_LIST(V)
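
// A minimal sketch of the intended client pattern (StoreRootNoBarrier is a
// hypothetical helper, not part of this header): a store of one of these
// roots may safely skip the write barrier, per the comment above.
//
//   void StoreRootNoBarrier(Heap* heap, Object** slot, int root_index) {
//     DCHECK(Heap::RootIsImmortalImmovable(root_index));
//     *slot = heap->root(static_cast<Heap::RootListIndex>(root_index));
//     // No write barrier: the target never moves and is never collected.
//   }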

// Forward declarations.
class AllocationObserver;
class ArrayBufferTracker;
class GCIdleTimeAction;
class GCIdleTimeHandler;
class GCIdleTimeHeapState;
class GCTracer;
class HeapObjectsFilter;
class HeapStats;
class HistogramTimer;
class Isolate;
class MemoryAllocator;
class MemoryReducer;
class ObjectIterator;
class ObjectStats;
class Page;
class PagedSpace;
class Scavenger;
class ScavengeJob;
class Space;
class StoreBuffer;
class TracePossibleWrapperReporter;
class WeakObjectRetainer;

typedef void (*ObjectSlotCallback)(HeapObject** from, HeapObject* to);

enum ArrayStorageAllocationMode {
  DONT_INITIALIZE_ARRAY_ELEMENTS,
  INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE
};

enum class ClearRecordedSlots { kYes, kNo };

enum class ClearBlackArea { kYes, kNo };

enum class GarbageCollectionReason {
  kUnknown = 0,
  kAllocationFailure = 1,
  kAllocationLimit = 2,
  kContextDisposal = 3,
  kCountersExtension = 4,
  kDebugger = 5,
  kDeserializer = 6,
  kExternalMemoryPressure = 7,
  kFinalizeMarkingViaStackGuard = 8,
  kFinalizeMarkingViaTask = 9,
  kFullHashtable = 10,
  kHeapProfiler = 11,
  kIdleTask = 12,
  kLastResort = 13,
  kLowMemoryNotification = 14,
  kMakeHeapIterable = 15,
  kMemoryPressure = 16,
  kMemoryReducer = 17,
  kRuntime = 18,
  kSamplingProfiler = 19,
  kSnapshotCreator = 20,
  kTesting = 21
  // If you add new items here, then update the incremental_marking_reason,
  // mark_compact_reason, and scavenge_reason counters in counters.h.
  // Also update src/tools/metrics/histograms/histograms.xml in chromium.
};

// A queue of objects promoted during scavenge. Each object is accompanied by
// its size to avoid dereferencing a map pointer for scanning. The last page in
// to-space is used for the promotion queue. On conflict during scavenge, the
// promotion queue is allocated externally and all entries are copied to the
// external queue.
class PromotionQueue {
 public:
  explicit PromotionQueue(Heap* heap)
      : front_(nullptr),
        rear_(nullptr),
        limit_(nullptr),
        emergency_stack_(nullptr),
        heap_(heap) {}

  void Initialize();
  void Destroy();

  inline void SetNewLimit(Address limit);
  inline bool IsBelowPromotionQueue(Address to_space_top);

  inline void insert(HeapObject* target, int32_t size, bool was_marked_black);
  inline void remove(HeapObject** target, int32_t* size,
                     bool* was_marked_black);

  bool is_empty() {
    return (front_ == rear_) &&
           (emergency_stack_ == nullptr || emergency_stack_->length() == 0);
  }

 private:
  struct Entry {
    Entry(HeapObject* obj, int32_t size, bool was_marked_black)
        : obj_(obj), size_(size), was_marked_black_(was_marked_black) {}

    HeapObject* obj_;
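    // The object size and the marked-black flag are packed into one 32-bit
    // word to keep the entry small.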
    int32_t size_ : 31;
    bool was_marked_black_ : 1;
  };

  inline Page* GetHeadPage();

  void RelocateQueueHead();

  // The front of the queue is higher in the memory page chain than the rear.
  struct Entry* front_;
  struct Entry* rear_;
  struct Entry* limit_;

  List<Entry>* emergency_stack_;
  Heap* heap_;

  DISALLOW_COPY_AND_ASSIGN(PromotionQueue);
};
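
// A minimal drain-loop sketch (illustrative only, not the actual scavenger):
//
//   PromotionQueue* queue = heap->promotion_queue();
//   while (!queue->is_empty()) {
//     HeapObject* target;
//     int32_t size;
//     bool was_marked_black;
//     queue->remove(&target, &size, &was_marked_black);
//     heap->IterateAndScavengePromotedObject(target, size, was_marked_black);
//   }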

class AllocationResult {
 public:
  static inline AllocationResult Retry(AllocationSpace space = NEW_SPACE) {
    return AllocationResult(space);
  }

  // Implicit constructor from Object*.
  AllocationResult(Object* object)  // NOLINT
      : object_(object) {
    // AllocationResults can't return Smis, which are used to represent
    // failure and the space to retry in.
    CHECK(!object->IsSmi());
  }

  AllocationResult() : object_(Smi::FromInt(NEW_SPACE)) {}

  inline bool IsRetry() { return object_->IsSmi(); }
  inline HeapObject* ToObjectChecked();
  inline AllocationSpace RetrySpace();

  template <typename T>
  bool To(T** obj) {
    if (IsRetry()) return false;
    *obj = T::cast(object_);
    return true;
  }

 private:
  explicit AllocationResult(AllocationSpace space)
      : object_(Smi::FromInt(static_cast<int>(space))) {}

  Object* object_;
};

STATIC_ASSERT(sizeof(AllocationResult) == kPointerSize);
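
// Typical call-site pattern (illustrative; AllocateRawFixedArray stands in
// for any internal allocator that returns an AllocationResult):
//
//   AllocationResult allocation = AllocateRawFixedArray(length, NOT_TENURED);
//   FixedArray* array = nullptr;
//   if (!allocation.To(&array)) {
//     // Retry: allocation.RetrySpace() names the space to collect first.
//   }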

#ifdef DEBUG
struct CommentStatistic {
  const char* comment;
  int size;
  int count;
  void Clear() {
    comment = NULL;
    size = 0;
    count = 0;
  }
  // Must be small, since an iteration is used for lookup.
  static const int kMaxComments = 64;
};
#endif

class NumberAndSizeInfo BASE_EMBEDDED {
 public:
  NumberAndSizeInfo() : number_(0), bytes_(0) {}

  int number() const { return number_; }
  void increment_number(int num) { number_ += num; }

  int bytes() const { return bytes_; }
  void increment_bytes(int size) { bytes_ += size; }

  void clear() {
    number_ = 0;
    bytes_ = 0;
  }

 private:
  int number_;
  int bytes_;
};

// HistogramInfo class for recording a single "bar" of a histogram.  This
// class is used for collecting statistics to print to the log file.
class HistogramInfo : public NumberAndSizeInfo {
 public:
  HistogramInfo() : NumberAndSizeInfo(), name_(nullptr) {}

  const char* name() { return name_; }
  void set_name(const char* name) { name_ = name; }

 private:
  const char* name_;
};
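
// Illustrative use when binning heap objects for the log (a hypothetical
// snippet; the type name and size are stand-ins):
//
//   HistogramInfo info;
//   info.set_name("FIXED_ARRAY_TYPE");
//   info.increment_number(1);
//   info.increment_bytes(object_size);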

class Heap {
 public:
  // Declare all the root indices.  This defines the root list order.
  enum RootListIndex {
#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
    STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
#undef ROOT_INDEX_DECLARATION

#define STRING_INDEX_DECLARATION(name, str) k##name##RootIndex,
        INTERNALIZED_STRING_LIST(STRING_INDEX_DECLARATION)
#undef STRING_INDEX_DECLARATION

#define SYMBOL_INDEX_DECLARATION(name) k##name##RootIndex,
            PRIVATE_SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
#undef SYMBOL_INDEX_DECLARATION

#define SYMBOL_INDEX_DECLARATION(name, description) k##name##RootIndex,
                PUBLIC_SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
                    WELL_KNOWN_SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
#undef SYMBOL_INDEX_DECLARATION

// Utility type maps
#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
                        STRUCT_LIST(DECLARE_STRUCT_MAP)
#undef DECLARE_STRUCT_MAP
                            kStringTableRootIndex,

#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
    SMI_ROOT_LIST(ROOT_INDEX_DECLARATION)
#undef ROOT_INDEX_DECLARATION
        kRootListLength,
    kStrongRootListLength = kStringTableRootIndex,
    kSmiRootsStart = kStringTableRootIndex + 1
  };

  enum FindMementoMode { kForRuntime, kForGC };

  enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };

  // Indicates whether live bytes adjustment is triggered
  // - from within GC code before sweeping has started (SEQUENTIAL_TO_SWEEPER),
  // - or from within GC or from mutator code while sweeping runs concurrently
  //   (CONCURRENT_TO_SWEEPER).
  enum InvocationMode { SEQUENTIAL_TO_SWEEPER, CONCURRENT_TO_SWEEPER };

  enum UpdateAllocationSiteMode { kGlobal, kCached };

  // Taking this lock prevents the GC from entering a phase that relocates
  // object references.
  class RelocationLock {
   public:
    explicit RelocationLock(Heap* heap) : heap_(heap) {
      heap_->relocation_mutex_.Lock();
    }

    ~RelocationLock() { heap_->relocation_mutex_.Unlock(); }

   private:
    Heap* heap_;
  };
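
  // Scoped usage (illustrative): hold the lock while reading object addresses
  // that must not be invalidated by relocation.
  //
  //   {
  //     Heap::RelocationLock relocation_lock(heap);
  //     // ... dereference heap object pointers safely ...
  //   }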

  // Support for partial snapshots.  After calling this we have a linear
  // space to write objects in each space.
  struct Chunk {
    uint32_t size;
    Address start;
    Address end;
  };
  typedef List<Chunk> Reservation;

  static const int kInitalOldGenerationLimitFactor = 2;

#if V8_OS_ANDROID
  // Don't apply pointer multiplier on Android since it has no swap space and
  // should instead adapt its heap size based on available physical memory.
  static const int kPointerMultiplier = 1;
#else
  static const int kPointerMultiplier = i::kPointerSize / 4;
#endif
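
  // For example, on a typical 64-bit target kPointerMultiplier is 8 / 4 = 2,
  // so kMaxSemiSpaceSizeHighMemoryDevice below works out to 8 * 2 = 16 MB.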

  // The new space size has to be a power of 2. Sizes are in MB.
  static const int kMaxSemiSpaceSizeLowMemoryDevice = 1 * kPointerMultiplier;
  static const int kMaxSemiSpaceSizeMediumMemoryDevice = 4 * kPointerMultiplier;
  static const int kMaxSemiSpaceSizeHighMemoryDevice = 8 * kPointerMultiplier;
  static const int kMaxSemiSpaceSizeHugeMemoryDevice = 8 * kPointerMultiplier;

  // The old space size has to be a multiple of Page::kPageSize.
  // Sizes are in MB.
  static const int kMaxOldSpaceSizeLowMemoryDevice = 128 * kPointerMultiplier;
  static const int kMaxOldSpaceSizeMediumMemoryDevice =
      256 * kPointerMultiplier;
  static const int kMaxOldSpaceSizeHighMemoryDevice = 512 * kPointerMultiplier;
  static const int kMaxOldSpaceSizeHugeMemoryDevice = 700 * kPointerMultiplier;

  // The executable size has to be a multiple of Page::kPageSize.
  // Sizes are in MB.
  static const int kMaxExecutableSizeLowMemoryDevice = 96 * kPointerMultiplier;
  static const int kMaxExecutableSizeMediumMemoryDevice =
      192 * kPointerMultiplier;
  static const int kMaxExecutableSizeHighMemoryDevice =
      256 * kPointerMultiplier;
  static const int kMaxExecutableSizeHugeMemoryDevice =
      256 * kPointerMultiplier;

  static const int kTraceRingBufferSize = 512;
  static const int kStacktraceBufferSize = 512;

  V8_EXPORT_PRIVATE static const double kMinHeapGrowingFactor;
  V8_EXPORT_PRIVATE static const double kMaxHeapGrowingFactor;
  static const double kMaxHeapGrowingFactorMemoryConstrained;
  static const double kMaxHeapGrowingFactorIdle;
  static const double kConservativeHeapGrowingFactor;
  static const double kTargetMutatorUtilization;

  static const int kNoGCFlags = 0;
  static const int kReduceMemoryFootprintMask = 1;
  static const int kAbortIncrementalMarkingMask = 2;
  static const int kFinalizeIncrementalMarkingMask = 4;

  // Making the heap iterable requires us to abort incremental marking.
  static const int kMakeHeapIterableMask = kAbortIncrementalMarkingMask;

  // The roots that have an index less than this are always in old space.
  static const int kOldSpaceRoots = 0x20;

  // The minimum size of a HeapObject on the heap.
  static const int kMinObjectSizeInWords = 2;

  STATIC_ASSERT(kUndefinedValueRootIndex ==
                Internals::kUndefinedValueRootIndex);
  STATIC_ASSERT(kTheHoleValueRootIndex == Internals::kTheHoleValueRootIndex);
  STATIC_ASSERT(kNullValueRootIndex == Internals::kNullValueRootIndex);
  STATIC_ASSERT(kTrueValueRootIndex == Internals::kTrueValueRootIndex);
  STATIC_ASSERT(kFalseValueRootIndex == Internals::kFalseValueRootIndex);
  STATIC_ASSERT(kempty_stringRootIndex == Internals::kEmptyStringRootIndex);

  // Calculates the maximum amount of filler that could be required by the
  // given alignment.
  static int GetMaximumFillToAlign(AllocationAlignment alignment);
  // Calculates the actual amount of filler required for a given address at the
  // given alignment.
  static int GetFillToAlign(Address address, AllocationAlignment alignment);
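
  // Worked example (a sketch, assuming kDoubleAligned requests 8-byte
  // alignment on a 32-bit target): GetMaximumFillToAlign(kDoubleAligned)
  // would be kPointerSize, and GetFillToAlign(addr, kDoubleAligned) would be
  // kPointerSize for a misaligned addr and 0 otherwise.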

  template <typename T>
  static inline bool IsOneByte(T t, int chars);

  static void FatalProcessOutOfMemory(const char* location,
                                      bool is_heap_oom = false);

  static bool RootIsImmortalImmovable(int root_index);

  // Checks whether the space is valid.
  static bool IsValidAllocationSpace(AllocationSpace space);

  // Generated code can embed direct references to non-writable roots,
  // provided they are not in new space.
  static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index);

  // Zapping is needed for heap verification and is always done in debug builds.
  static inline bool ShouldZapGarbage() {
#ifdef DEBUG
    return true;
#else
#ifdef VERIFY_HEAP
    return FLAG_verify_heap;
#else
    return false;
#endif
#endif
  }

  static inline bool IsYoungGenerationCollector(GarbageCollector collector) {
    return collector == SCAVENGER || collector == MINOR_MARK_COMPACTOR;
  }

  static inline GarbageCollector YoungGenerationCollector() {
    return (FLAG_minor_mc) ? MINOR_MARK_COMPACTOR : SCAVENGER;
  }

  static inline const char* CollectorName(GarbageCollector collector) {
    switch (collector) {
      case SCAVENGER:
        return "Scavenger";
      case MARK_COMPACTOR:
        return "Mark-Compact";
      case MINOR_MARK_COMPACTOR:
        return "Minor Mark-Compact";
    }
    return "Unknown collector";
  }

  V8_EXPORT_PRIVATE static double HeapGrowingFactor(double gc_speed,
                                                    double mutator_speed);

  // Copy a block of memory from src to dst. The block size must be a multiple
  // of the pointer size.
  static inline void CopyBlock(Address dst, Address src, int byte_size);

  // Determines a static visitor id based on the given {map} that can then be
  // stored on the map to facilitate fast dispatch for {StaticVisitorBase}.
  static int GetStaticVisitorIdForMap(Map* map);

  // Notifies the heap that it is ok to start marking or other activities that
  // should not happen during deserialization.
  void NotifyDeserializationComplete();

  inline Address* NewSpaceAllocationTopAddress();
  inline Address* NewSpaceAllocationLimitAddress();
  inline Address* OldSpaceAllocationTopAddress();
  inline Address* OldSpaceAllocationLimitAddress();

  // Clear the Instanceof cache (used when a prototype changes).
  inline void ClearInstanceofCache();

  // FreeSpace objects have a null map after deserialization. Update the map.
  void RepairFreeListsAfterDeserialization();

  // Move len elements within a given array from src_index to dst_index.
  void MoveElements(FixedArray* array, int dst_index, int src_index, int len);

  // Initialize a filler object to keep the ability to iterate over the heap
  // when introducing gaps within pages. If slots could have been recorded in
  // the freed area, then pass ClearRecordedSlots::kYes as the mode. Otherwise,
  // pass ClearRecordedSlots::kNo. If the filler was created in a black area
  // we may want to clear the corresponding mark bits with ClearBlackArea::kYes,
  // which is the default. ClearBlackArea::kNo does not clear the mark bits.
  void CreateFillerObjectAt(
      Address addr, int size, ClearRecordedSlots mode,
      ClearBlackArea black_area_mode = ClearBlackArea::kYes);
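
  // Illustrative call from hypothetical trimming code: after shrinking an
  // object, fill the freed tail so heap iteration still sees valid objects.
  //
  //   heap->CreateFillerObjectAt(object_end - trimmed_bytes, trimmed_bytes,
  //                              ClearRecordedSlots::kYes);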

  bool CanMoveObjectStart(HeapObject* object);

  // Maintain consistency of live bytes during incremental marking.
  void AdjustLiveBytes(HeapObject* object, int by, InvocationMode mode);

  // Trim the given array from the left. Note that this relocates the object
  // start and hence is only valid if there is only a single reference to it.
  FixedArrayBase* LeftTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);

  // Trim the given array from the right.
  template<Heap::InvocationMode mode>
  void RightTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);

  // Converts the given boolean condition to JavaScript boolean value.
  inline Oddball* ToBoolean(bool condition);

  // Check whether the heap is currently iterable.
  bool IsHeapIterable();

  // Notify the heap that a context has been disposed.
  int NotifyContextDisposed(bool dependant_context);

  void set_native_contexts_list(Object* object) {
    native_contexts_list_ = object;
  }
  Object* native_contexts_list() const { return native_contexts_list_; }

  void set_allocation_sites_list(Object* object) {
    allocation_sites_list_ = object;
  }
  Object* allocation_sites_list() { return allocation_sites_list_; }

  // Used in CreateAllocationSiteStub and the (de)serializer.
  Object** allocation_sites_list_address() { return &allocation_sites_list_; }

  void set_encountered_weak_collections(Object* weak_collection) {
    encountered_weak_collections_ = weak_collection;
  }
  Object* encountered_weak_collections() const {
    return encountered_weak_collections_;
  }

  void set_encountered_weak_cells(Object* weak_cell) {
    encountered_weak_cells_ = weak_cell;
  }
  Object* encountered_weak_cells() const { return encountered_weak_cells_; }

  void set_encountered_transition_arrays(Object* transition_array) {
    encountered_transition_arrays_ = transition_array;
  }
  Object* encountered_transition_arrays() const {
    return encountered_transition_arrays_;
  }

  // Number of mark-sweeps.
  int ms_count() const { return ms_count_; }

  // Checks whether the given object is allowed to be migrated from its
  // current space into the given destination space. Used for debugging.
  inline bool AllowedToBeMigrated(HeapObject* object, AllocationSpace dest);

  void CheckHandleCount();

  // Number of "runtime allocations" done so far.
  uint32_t allocations_count() { return allocations_count_; }

  // Print short heap statistics.
  void PrintShortHeapStatistics();

  inline HeapState gc_state() { return gc_state_; }

  inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; }

  // If an object has an AllocationMemento trailing it, return it; otherwise
  // return NULL.
  template <FindMementoMode mode>
  inline AllocationMemento* FindAllocationMemento(HeapObject* object);

  // Returns false if not able to reserve.
  bool ReserveSpace(Reservation* reservations, List<Address>* maps);

  //
  // Support for the API.
  //

  void CreateApiObjects();

  // Implements the corresponding V8 API function.
  bool IdleNotification(double deadline_in_seconds);
  bool IdleNotification(int idle_time_in_ms);

  void MemoryPressureNotification(MemoryPressureLevel level,
                                  bool is_isolate_locked);
  void CheckMemoryPressure();

  double MonotonicallyIncreasingTimeInMs();

  void RecordStats(HeapStats* stats, bool take_snapshot = false);

  // Check the new space expansion criteria and expand semispaces if they
  // were hit.
  void CheckNewSpaceExpansionCriteria();

  void VisitExternalResources(v8::ExternalResourceVisitor* visitor);

  // An object should be promoted if the object has survived a
  // scavenge operation.
  inline bool ShouldBePromoted(Address old_address, int object_size);

  void ClearNormalizedMapCaches();

  void IncrementDeferredCount(v8::Isolate::UseCounterFeature feature);

  // Completely clear the Instanceof cache (to stop it keeping objects alive
  // around a GC).
  inline void CompletelyClearInstanceofCache();

  inline uint32_t HashSeed();

  inline int NextScriptId();

  inline void SetArgumentsAdaptorDeoptPCOffset(int pc_offset);
  inline void SetConstructStubDeoptPCOffset(int pc_offset);
  inline void SetGetterStubDeoptPCOffset(int pc_offset);
  inline void SetSetterStubDeoptPCOffset(int pc_offset);
  inline void SetInterpreterEntryReturnPCOffset(int pc_offset);
  inline int GetNextTemplateSerialNumber();

  inline void SetSerializedTemplates(FixedArray* templates);

  // For post mortem debugging.
  void RememberUnmappedPage(Address page, bool compacted);

  // Global inline caching age: it is incremented on some GCs after context
  // disposal. We use it to flush inline caches.
  int global_ic_age() { return global_ic_age_; }

  void AgeInlineCaches() {
    global_ic_age_ = (global_ic_age_ + 1) & SharedFunctionInfo::ICAgeBits::kMax;
  }

  int64_t external_memory_hard_limit() { return MaxOldGenerationSize() / 2; }

  int64_t external_memory() { return external_memory_; }
  void update_external_memory(int64_t delta) { external_memory_ += delta; }

  void update_external_memory_concurrently_freed(intptr_t freed) {
    external_memory_concurrently_freed_.Increment(freed);
  }

  void account_external_memory_concurrently_freed() {
    external_memory_ -= external_memory_concurrently_freed_.Value();
    external_memory_concurrently_freed_.SetValue(0);
  }

  void DeoptMarkedAllocationSites();

  inline bool DeoptMaybeTenuredAllocationSites();

  void AddWeakNewSpaceObjectToCodeDependency(Handle<HeapObject> obj,
                                             Handle<WeakCell> code);

  void AddWeakObjectToCodeDependency(Handle<HeapObject> obj,
                                     Handle<DependentCode> dep);

  DependentCode* LookupWeakObjectToCodeDependency(Handle<HeapObject> obj);

  void CompactWeakFixedArrays();

  void AddRetainedMap(Handle<Map> map);

  // This event is triggered after successful allocation of a new object made
  // by runtime. Allocations of target space for object evacuation do not
  // trigger the event. In order to track ALL allocations one must turn off
  // FLAG_inline_new and FLAG_use_allocation_folding.
  inline void OnAllocationEvent(HeapObject* object, int size_in_bytes);

  // This event is triggered after object is moved to a new place.
  inline void OnMoveEvent(HeapObject* target, HeapObject* source,
                          int size_in_bytes);

  bool deserialization_complete() const { return deserialization_complete_; }

  bool HasLowAllocationRate();
  bool HasHighFragmentation();
  bool HasHighFragmentation(size_t used, size_t committed);

  void ActivateMemoryReducerIfNeeded();

  bool ShouldOptimizeForMemoryUsage();

  bool IsLowMemoryDevice() {
    return max_old_generation_size_ <= kMaxOldSpaceSizeLowMemoryDevice;
  }

  bool IsMemoryConstrainedDevice() {
    return max_old_generation_size_ <= kMaxOldSpaceSizeMediumMemoryDevice;
  }

  bool HighMemoryPressure() {
    return memory_pressure_level_.Value() != MemoryPressureLevel::kNone;
  }

  // ===========================================================================
  // Initialization. ===========================================================
  // ===========================================================================

  // Configure heap size in MB before setup. Return false if the heap has been
  // set up already.
  bool ConfigureHeap(size_t max_semi_space_size, size_t max_old_space_size,
                     size_t max_executable_size, size_t code_range_size);
  bool ConfigureHeapDefault();

  // Prepares the heap, setting up memory areas that are needed in the isolate
  // without actually creating any objects.
  bool SetUp();

  // Bootstraps the object heap with the core set of objects required to run.
  // Returns whether it succeeded.
  bool CreateHeapObjects();

  // Create ObjectStats if live_object_stats_ or dead_object_stats_ are nullptr.
  V8_INLINE void CreateObjectStats();

  // Destroys all memory allocated by the heap.
  void TearDown();

  // Returns whether SetUp has been called.
  bool HasBeenSetUp();

  // ===========================================================================
  // Getters for spaces. =======================================================
  // ===========================================================================

  inline Address NewSpaceTop();

  NewSpace* new_space() { return new_space_; }
  OldSpace* old_space() { return old_space_; }
  OldSpace* code_space() { return code_space_; }
  MapSpace* map_space() { return map_space_; }
  LargeObjectSpace* lo_space() { return lo_space_; }

  inline PagedSpace* paged_space(int idx);
  inline Space* space(int idx);

  // Returns name of the space.
  const char* GetSpaceName(int idx);

  // ===========================================================================
  // Getters to other components. ==============================================
  // ===========================================================================

  GCTracer* tracer() { return tracer_; }

  MemoryAllocator* memory_allocator() { return memory_allocator_; }

  PromotionQueue* promotion_queue() { return &promotion_queue_; }

  inline Isolate* isolate();

  MarkCompactCollector* mark_compact_collector() {
    return mark_compact_collector_;
  }

  // ===========================================================================
  // Root set access. ==========================================================
  // ===========================================================================

  // Heap root getters.
#define ROOT_ACCESSOR(type, name, camel_name) inline type* name();
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR
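
  // For example, the ROOT_LIST entry V(Oddball, undefined_value,
  // UndefinedValue) expands above to: inline Oddball* undefined_value();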

  // Utility type maps.
#define STRUCT_MAP_ACCESSOR(NAME, Name, name) inline Map* name##_map();
  STRUCT_LIST(STRUCT_MAP_ACCESSOR)
#undef STRUCT_MAP_ACCESSOR

#define STRING_ACCESSOR(name, str) inline String* name();
  INTERNALIZED_STRING_LIST(STRING_ACCESSOR)
#undef STRING_ACCESSOR

#define SYMBOL_ACCESSOR(name) inline Symbol* name();
  PRIVATE_SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR

#define SYMBOL_ACCESSOR(name, description) inline Symbol* name();
  PUBLIC_SYMBOL_LIST(SYMBOL_ACCESSOR)
  WELL_KNOWN_SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR

  Object* root(RootListIndex index) { return roots_[index]; }
  Handle<Object> root_handle(RootListIndex index) {
    return Handle<Object>(&roots_[index]);
  }
  template <typename T>
  bool IsRootHandle(Handle<T> handle, RootListIndex* index) const {
    Object** const handle_location = bit_cast<Object**>(handle.address());
    if (handle_location >= &roots_[kRootListLength]) return false;
    if (handle_location < &roots_[0]) return false;
    *index = static_cast<RootListIndex>(handle_location - &roots_[0]);
    return true;
  }

  // Generated code can embed this address to get access to the roots.
  Object** roots_array_start() { return roots_; }

  // Sets the code_stubs root (only used when expanding the dictionary).
  void SetRootCodeStubs(UnseededNumberDictionary* value) {
    roots_[kCodeStubsRootIndex] = value;
  }

  void SetRootMaterializedObjects(FixedArray* objects) {
    roots_[kMaterializedObjectsRootIndex] = objects;
  }

  void SetRootScriptList(Object* value) {
    roots_[kScriptListRootIndex] = value;
  }

  void SetRootStringTable(StringTable* value) {
    roots_[kStringTableRootIndex] = value;
  }

  void SetRootNoScriptSharedFunctionInfos(Object* value) {
    roots_[kNoScriptSharedFunctionInfosRootIndex] = value;
  }

  void SetMessageListeners(TemplateList* value) {
    roots_[kMessageListenersRootIndex] = value;
  }

  // Set the stack limit in the roots_ array.  Some architectures generate
  // code that looks here, because it is faster than loading from the static
  // jslimit_/real_jslimit_ variable in the StackGuard.
  void SetStackLimits();

  // The stack limit is thread-dependent. To be able to reproduce the same
  // snapshot blob, we need to reset it before serializing.
  void ClearStackLimits();

  // Generated code can treat direct references to this root as constant.
  bool RootCanBeTreatedAsConstant(RootListIndex root_index);

  Map* MapForFixedTypedArray(ExternalArrayType array_type);
  RootListIndex RootIndexForFixedTypedArray(ExternalArrayType array_type);

  RootListIndex RootIndexForEmptyFixedTypedArray(ElementsKind kind);
  FixedTypedArrayBase* EmptyFixedTypedArrayForMap(Map* map);

  void RegisterStrongRoots(Object** start, Object** end);
  void UnregisterStrongRoots(Object** start);

  // ===========================================================================
  // Inline allocation. ========================================================
  // ===========================================================================

  // Indicates whether inline bump-pointer allocation has been disabled.
  bool inline_allocation_disabled() { return inline_allocation_disabled_; }

  // Switch whether inline bump-pointer allocation should be used.
  void EnableInlineAllocation();
  void DisableInlineAllocation();

  // ===========================================================================
  // Methods triggering GCs. ===================================================
  // ===========================================================================

  // Performs garbage collection operation.
  // Returns whether there is a chance that another major GC could
  // collect more garbage.
  inline bool CollectGarbage(
      AllocationSpace space, GarbageCollectionReason gc_reason,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

  // Performs a full garbage collection.  If (flags & kMakeHeapIterableMask) is
  // non-zero, then the slower precise sweeper is used, which leaves the heap
  // in a state where we can iterate over the heap visiting all objects.
  void CollectAllGarbage(
      int flags, GarbageCollectionReason gc_reason,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

  // Last hope GC, should try to squeeze as much as possible.
  void CollectAllAvailableGarbage(GarbageCollectionReason gc_reason);

  // Reports an external memory pressure event; either performs a major GC or
  // completes incremental marking in order to free external resources.
1135   void ReportExternalMemoryPressure();
1136 
1137   // Invoked when GC was requested via the stack guard.
1138   void HandleGCRequest();
1139 
1140   // ===========================================================================
1141   // Iterators. ================================================================
1142   // ===========================================================================
1143 
1144   // Iterates over all roots in the heap.
1145   void IterateRoots(ObjectVisitor* v, VisitMode mode);
1146   // Iterates over all strong roots in the heap.
1147   void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
1148   // Iterates over entries in the smi roots list.  Only interesting to the
1149   // serializer/deserializer, since GC does not care about smis.
1150   void IterateSmiRoots(ObjectVisitor* v);
1151   // Iterates over all the other roots in the heap.
1152   void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);
1153 
1154   // Iterate pointers of promoted objects.
1155   void IterateAndScavengePromotedObject(HeapObject* target, int size,
1156                                         bool was_marked_black);
1157 
  // ===========================================================================
  // Store buffer API. =========================================================
  // ===========================================================================

  // Write barrier support for object[offset] = o;
  inline void RecordWrite(Object* object, int offset, Object* o);
  inline void RecordWriteIntoCode(Code* host, RelocInfo* rinfo, Object* target);
  void RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo, Object* target);
  void RecordWritesIntoCode(Code* code);
  inline void RecordFixedArrayElements(FixedArray* array, int offset,
                                       int length);

  inline Address* store_buffer_top_address();

  void ClearRecordedSlot(HeapObject* object, Object** slot);
  void ClearRecordedSlotRange(Address start, Address end);
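
  // Sketch of the expected write-barrier pattern (illustrative only; the real
  // callers are generated code and the runtime's store operations):
  //
  //   // After a raw store of a potentially-new-space `value` into `object`
  //   // at byte offset `offset`, record the slot so the GC can later find
  //   // the old-to-new pointer:
  //   heap->RecordWrite(object, offset, value);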

  // ===========================================================================
  // Incremental marking API. ==================================================
  // ===========================================================================

  // Starts incremental marking and ensures that the idle time handler can
  // perform incremental steps.
  void StartIdleIncrementalMarking(GarbageCollectionReason gc_reason);

  // Starts incremental marking assuming incremental marking is currently
  // stopped.
  void StartIncrementalMarking(
      int gc_flags, GarbageCollectionReason gc_reason,
      GCCallbackFlags gc_callback_flags = GCCallbackFlags::kNoGCCallbackFlags);

  void StartIncrementalMarkingIfAllocationLimitIsReached(
      int gc_flags,
      GCCallbackFlags gc_callback_flags = GCCallbackFlags::kNoGCCallbackFlags);

  void FinalizeIncrementalMarkingIfComplete(GarbageCollectionReason gc_reason);

  bool TryFinalizeIdleIncrementalMarking(double idle_time_in_ms,
                                         GarbageCollectionReason gc_reason);
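
  // Typical incremental-marking flow (a sketch; it assumes the
  // IncrementalMarking::IsStopped() predicate and the reason values shown,
  // which are not declared in this excerpt):
  //
  //   if (heap->incremental_marking()->IsStopped()) {
  //     heap->StartIncrementalMarking(Heap::kNoGCFlags,
  //                                   GarbageCollectionReason::kTesting);
  //   }
  //   // ... mutator runs, marking progresses in steps ...
  //   heap->FinalizeIncrementalMarkingIfComplete(
  //       GarbageCollectionReason::kFinalizeMarkingViaTask);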

  void RegisterReservationsForBlackAllocation(Reservation* reservations);

  IncrementalMarking* incremental_marking() { return incremental_marking_; }

  // ===========================================================================
  // Embedder heap tracer support. =============================================
  // ===========================================================================

  void SetEmbedderHeapTracer(EmbedderHeapTracer* tracer);

  bool UsingEmbedderHeapTracer() { return embedder_heap_tracer() != nullptr; }

  void TracePossibleWrapper(JSObject* js_object);

  void RegisterExternallyReferencedObject(Object** object);

  void RegisterWrappersWithEmbedderHeapTracer();

  // In order to avoid running out of memory we force tracing wrappers if there
  // are too many of them.
  bool RequiresImmediateWrapperProcessing();

  EmbedderHeapTracer* embedder_heap_tracer() { return embedder_heap_tracer_; }

  size_t wrappers_to_trace() { return wrappers_to_trace_.size(); }

  // ===========================================================================
  // External string table API. ================================================
  // ===========================================================================

  // Registers an external string.
  inline void RegisterExternalString(String* string);

  // Finalizes an external string by deleting the associated external
  // data and clearing the resource pointer.
  inline void FinalizeExternalString(String* string);
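
  // Lifecycle sketch (illustrative; `ext` stands for a hypothetical external
  // String backed by a v8::String::ExternalStringResource):
  //
  //   heap->RegisterExternalString(ext);   // at externalization time
  //   // ... the string participates in GCs via the external string table ...
  //   heap->FinalizeExternalString(ext);   // when the string dies: deletes
  //                                        // the resource, clears the pointer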

  // ===========================================================================
  // Methods checking/returning the space of a given object/address. ===========
  // ===========================================================================

  // Returns whether the object resides in new space.
  inline bool InNewSpace(Object* object);
  inline bool InFromSpace(Object* object);
  inline bool InToSpace(Object* object);

  // Returns whether the object resides in old space.
  inline bool InOldSpace(Object* object);

  // Checks whether an address/object is in the heap (including the auxiliary
  // area and unused area).
  bool Contains(HeapObject* value);

  // Checks whether an address/object is in a given space.
  // Currently used by tests, serialization and heap verification only.
  bool InSpace(HeapObject* value, AllocationSpace space);

  // Slow methods that can be used for verification as they can also be used
  // with off-heap Addresses.
  bool ContainsSlow(Address addr);
  bool InSpaceSlow(Address addr, AllocationSpace space);
  inline bool InNewSpaceSlow(Address address);
  inline bool InOldSpaceSlow(Address address);
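
  // Example predicate use (a sketch; `obj` is any tagged Object*):
  //
  //   if (heap->InNewSpace(obj)) {
  //     // obj may still move during the next scavenge; slots pointing at it
  //     // must be recorded (see the store buffer API above).
  //   } else if (heap->InOldSpace(obj)) {
  //     // obj only moves during a compacting full GC.
  //   }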

  // ===========================================================================
  // Object statistics tracking. ===============================================
  // ===========================================================================

  // Returns the number of buckets used by object statistics tracking during a
  // major GC. Note, however, that the following methods fail gracefully when
  // the bounds are exceeded.
  size_t NumberOfTrackedHeapObjectTypes();

  // Returns object statistics about count and size at the last major GC.
  // Objects are being grouped into buckets that roughly resemble existing
  // instance types.
  size_t ObjectCountAtLastGC(size_t index);
  size_t ObjectSizeAtLastGC(size_t index);

  // Retrieves names of buckets used by object statistics tracking.
  bool GetObjectTypeName(size_t index, const char** object_type,
                         const char** object_sub_type);

  // ===========================================================================
  // Code statistics. ==========================================================
  // ===========================================================================

  // Collect code (Code and BytecodeArray objects) statistics.
  void CollectCodeStatistics();

  // ===========================================================================
  // GC statistics. ============================================================
  // ===========================================================================

  // Returns the maximum amount of memory reserved for the heap.
  size_t MaxReserved() {
    return 2 * max_semi_space_size_ + max_old_generation_size_;
  }
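
  // Worked example (numbers are illustrative, not authoritative defaults):
  // with max_semi_space_size_ = 8 MB and max_old_generation_size_ = 700 MB,
  // MaxReserved() = 2 * 8 MB + 700 MB = 716 MB: both semispaces plus the
  // whole old generation budget.
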
  size_t MaxSemiSpaceSize() { return max_semi_space_size_; }
  size_t InitialSemiSpaceSize() { return initial_semispace_size_; }
  size_t MaxOldGenerationSize() { return max_old_generation_size_; }
  size_t MaxExecutableSize() { return max_executable_size_; }

  // Returns the capacity of the heap in bytes w/o growing. Heap grows when
  // more spaces are needed until it reaches the limit.
  size_t Capacity();

  // Returns the capacity of the old generation.
  size_t OldGenerationCapacity();

  // Returns the amount of memory currently committed for the heap.
  size_t CommittedMemory();

  // Returns the amount of memory currently committed for the old space.
  size_t CommittedOldGenerationMemory();

  // Returns the amount of executable memory currently committed for the heap.
  size_t CommittedMemoryExecutable();

  // Returns the amount of physical memory currently committed for the heap.
  size_t CommittedPhysicalMemory();

  // Returns the maximum amount of memory ever committed for the heap.
  size_t MaximumCommittedMemory() { return maximum_committed_; }

  // Updates the maximum committed memory for the heap. Should be called
  // whenever a space grows.
  void UpdateMaximumCommitted();

  // Returns the available bytes in space w/o growing.
  // Heap doesn't guarantee that it can allocate an object that requires
  // all available bytes. Check MaxHeapObjectSize() instead.
  size_t Available();

  // Returns the size of all objects residing in the heap.
  size_t SizeOfObjects();

  void UpdateSurvivalStatistics(int start_new_space_size);

  inline void IncrementPromotedObjectsSize(size_t object_size) {
    promoted_objects_size_ += object_size;
  }
  inline size_t promoted_objects_size() { return promoted_objects_size_; }

  inline void IncrementSemiSpaceCopiedObjectSize(size_t object_size) {
    semi_space_copied_object_size_ += object_size;
  }
  inline size_t semi_space_copied_object_size() {
    return semi_space_copied_object_size_;
  }

  inline size_t SurvivedNewSpaceObjectSize() {
    return promoted_objects_size_ + semi_space_copied_object_size_;
  }

  inline void IncrementNodesDiedInNewSpace() { nodes_died_in_new_space_++; }

  inline void IncrementNodesCopiedInNewSpace() { nodes_copied_in_new_space_++; }

  inline void IncrementNodesPromoted() { nodes_promoted_++; }

  inline void IncrementYoungSurvivorsCounter(size_t survived) {
    survived_last_scavenge_ = survived;
    survived_since_last_expansion_ += survived;
  }

  inline uint64_t PromotedTotalSize() {
    return PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize();
  }

  inline void UpdateNewSpaceAllocationCounter();

  inline size_t NewSpaceAllocationCounter();

  // This should be used only for testing.
  void set_new_space_allocation_counter(size_t new_value) {
    new_space_allocation_counter_ = new_value;
  }

  void UpdateOldGenerationAllocationCounter() {
    old_generation_allocation_counter_at_last_gc_ =
        OldGenerationAllocationCounter();
  }

  size_t OldGenerationAllocationCounter() {
    return old_generation_allocation_counter_at_last_gc_ +
           PromotedSinceLastGC();
  }

  // This should be used only for testing.
  void set_old_generation_allocation_counter_at_last_gc(size_t new_value) {
    old_generation_allocation_counter_at_last_gc_ = new_value;
  }

  size_t PromotedSinceLastGC() {
    return PromotedSpaceSizeOfObjects() - old_generation_size_at_last_gc_;
  }
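
  // Invariant sketch (illustrative numbers): if old generation objects
  // measured 100 MB at the last GC (old_generation_size_at_last_gc_) and
  // PromotedSpaceSizeOfObjects() now reports 112 MB, then
  // PromotedSinceLastGC() = 12 MB, and OldGenerationAllocationCounter()
  // grows by exactly that amount on top of its value at the last GC.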

  int gc_count() const { return gc_count_; }

  // Returns the size of objects residing in non new spaces.
  size_t PromotedSpaceSizeOfObjects();

  double total_regexp_code_generated() { return total_regexp_code_generated_; }
  void IncreaseTotalRegexpCodeGenerated(int size) {
    total_regexp_code_generated_ += size;
  }

  void IncrementCodeGeneratedBytes(bool is_crankshafted, int size) {
    if (is_crankshafted) {
      crankshaft_codegen_bytes_generated_ += size;
    } else {
      full_codegen_bytes_generated_ += size;
    }
  }

  // ===========================================================================
  // Prologue/epilogue callback methods.========================================
  // ===========================================================================

  void AddGCPrologueCallback(v8::Isolate::GCCallback callback,
                             GCType gc_type_filter, bool pass_isolate = true);
  void RemoveGCPrologueCallback(v8::Isolate::GCCallback callback);

  void AddGCEpilogueCallback(v8::Isolate::GCCallback callback,
                             GCType gc_type_filter, bool pass_isolate = true);
  void RemoveGCEpilogueCallback(v8::Isolate::GCCallback callback);

  void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags);
  void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags);
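
  // Registration sketch (OnFullGC is a hypothetical callback; the signature
  // follows v8::Isolate::GCCallback):
  //
  //   static void OnFullGC(v8::Isolate* isolate, v8::GCType type,
  //                        v8::GCCallbackFlags flags) {
  //     // e.g. drop caches that hold weakly-referenced entries
  //   }
  //   ...
  //   heap->AddGCEpilogueCallback(OnFullGC, v8::kGCTypeMarkSweepCompact);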

  // ===========================================================================
  // Allocation methods. =======================================================
  // ===========================================================================

  // Creates a filler object and returns a heap object immediately after it.
  MUST_USE_RESULT HeapObject* PrecedeWithFiller(HeapObject* object,
                                                int filler_size);

  // Creates a filler object if needed for alignment and returns a heap object
  // immediately after it. If any space is left after the returned object,
  // another filler object is created so the over allocated memory is iterable.
  MUST_USE_RESULT HeapObject* AlignWithFiller(HeapObject* object,
                                              int object_size,
                                              int allocation_size,
                                              AllocationAlignment alignment);
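
  // Worked example (illustrative): to double-align an object on a 32-bit
  // heap, a caller over-allocates allocation_size = object_size +
  // kPointerSize bytes; AlignWithFiller then either places a one-word filler
  // before the object, or lets the object start at the already-aligned
  // address and fills the leftover word after it, so every word of the
  // over-allocation stays iterable.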

  // ===========================================================================
  // ArrayBuffer tracking. =====================================================
  // ===========================================================================

  // TODO(gc): API usability: encapsulate mutation of JSArrayBuffer::is_external
  // in the registration/unregistration APIs. Consider dropping the "New" from
  // "RegisterNewArrayBuffer" because one can re-register a previously
  // unregistered buffer, too, and the name is confusing.
  void RegisterNewArrayBuffer(JSArrayBuffer* buffer);
  void UnregisterArrayBuffer(JSArrayBuffer* buffer);

  // ===========================================================================
  // Allocation site tracking. =================================================
  // ===========================================================================

  // Updates the AllocationSite of a given {object}. If the global pretenuring
  // storage is passed as {pretenuring_feedback} the memento found count on
  // the corresponding allocation site is immediately updated and an entry
  // in the hash map is created. Otherwise the entry (including the count
  // value) is cached on the local pretenuring feedback.
  template <UpdateAllocationSiteMode mode>
  inline void UpdateAllocationSite(HeapObject* object,
                                   base::HashMap* pretenuring_feedback);

  // Removes an entry from the global pretenuring storage.
  inline void RemoveAllocationSitePretenuringFeedback(AllocationSite* site);

  // Merges local pretenuring feedback into the global one. Note that this
  // method needs to be called after evacuation, as allocation sites may be
  // evacuated and this method resolves forward pointers accordingly.
  void MergeAllocationSitePretenuringFeedback(
      const base::HashMap& local_pretenuring_feedback);
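
  // Feedback flow sketch (illustrative; `local` is a per-evacuation-task
  // base::HashMap owned by the caller, and the mode name is assumed from
  // UpdateAllocationSiteMode, which is not declared in this excerpt):
  //
  //   heap->UpdateAllocationSite<kCached>(object, &local);  // during copying
  //   ...                                                   // after evacuation
  //   heap->MergeAllocationSitePretenuringFeedback(local);  // resolve & merge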

// =============================================================================

#ifdef VERIFY_HEAP
  // Verify the heap is in its normal state before or after a GC.
  void Verify();
#endif

#ifdef DEBUG
  void set_allocation_timeout(int timeout) { allocation_timeout_ = timeout; }

  void TracePathToObjectFrom(Object* target, Object* root);
  void TracePathToObject(Object* target);
  void TracePathToGlobal();

  void Print();
  void PrintHandles();

  // Report heap statistics.
  void ReportHeapStatistics(const char* title);
  void ReportCodeStatistics(const char* title);
#endif

  static const char* GarbageCollectionReasonToString(
      GarbageCollectionReason gc_reason);

 private:
  class PretenuringScope;

  // External strings table is a place where all external strings are
  // registered.  We need to keep track of such strings to properly
  // finalize them.
  class ExternalStringTable {
   public:
    // Registers an external string.
    inline void AddString(String* string);

    inline void Iterate(ObjectVisitor* v);

    // Restores internal invariant and gets rid of collected strings.
    // Must be called after each Iterate() that modified the strings.
    void CleanUp();

    // Destroys all allocated memory.
    void TearDown();

   private:
    explicit ExternalStringTable(Heap* heap) : heap_(heap) {}

    inline void Verify();

    inline void AddOldString(String* string);

    // Notifies the table that only a prefix of the new list is valid.
    inline void ShrinkNewStrings(int position);

    // To speed up scavenge collections new space strings are kept
    // separate from old space strings.
    List<Object*> new_space_strings_;
    List<Object*> old_space_strings_;

    Heap* heap_;

    friend class Heap;

    DISALLOW_COPY_AND_ASSIGN(ExternalStringTable);
  };

  struct StrongRootsList;

  struct StringTypeTable {
    InstanceType type;
    int size;
    RootListIndex index;
  };

  struct ConstantStringTable {
    const char* contents;
    RootListIndex index;
  };

  struct StructTable {
    InstanceType type;
    int size;
    RootListIndex index;
  };

  struct GCCallbackPair {
    GCCallbackPair(v8::Isolate::GCCallback callback, GCType gc_type,
                   bool pass_isolate)
        : callback(callback), gc_type(gc_type), pass_isolate(pass_isolate) {}

    bool operator==(const GCCallbackPair& other) const {
      return other.callback == callback;
    }

    v8::Isolate::GCCallback callback;
    GCType gc_type;
    bool pass_isolate;
  };

  typedef String* (*ExternalStringTableUpdaterCallback)(Heap* heap,
                                                        Object** pointer);

  static const int kInitialStringTableSize = 2048;
  static const int kInitialEvalCacheSize = 64;
  static const int kInitialNumberStringCacheSize = 256;

  static const int kRememberedUnmappedPages = 128;

  static const StringTypeTable string_type_table[];
  static const ConstantStringTable constant_string_table[];
  static const StructTable struct_table[];

  static const int kYoungSurvivalRateHighThreshold = 90;
  static const int kYoungSurvivalRateAllowedDeviation = 15;
  static const int kOldSurvivalRateLowThreshold = 10;

  static const int kMaxMarkCompactsInIdleRound = 7;
  static const int kIdleScavengeThreshold = 5;

  static const int kInitialFeedbackCapacity = 256;

  Heap();

  static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
      Heap* heap, Object** pointer);

  // Selects the proper allocation space based on the pretenuring decision.
  static AllocationSpace SelectSpace(PretenureFlag pretenure) {
    return (pretenure == TENURED) ? OLD_SPACE : NEW_SPACE;
  }
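
  // Example: SelectSpace(TENURED) yields OLD_SPACE, so an allocation site
  // that pretenuring has promoted to TENURED skips new space entirely;
  // everything else starts in NEW_SPACE and must survive scavenges to be
  // promoted.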

#define ROOT_ACCESSOR(type, name, camel_name) \
  inline void set_##name(type* value);
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

  StoreBuffer* store_buffer() { return store_buffer_; }

  void set_current_gc_flags(int flags) {
    current_gc_flags_ = flags;
    DCHECK(!ShouldFinalizeIncrementalMarking() ||
           !ShouldAbortIncrementalMarking());
  }

  inline bool ShouldReduceMemory() const {
    return current_gc_flags_ & kReduceMemoryFootprintMask;
  }

  inline bool ShouldAbortIncrementalMarking() const {
    return current_gc_flags_ & kAbortIncrementalMarkingMask;
  }

  inline bool ShouldFinalizeIncrementalMarking() const {
    return current_gc_flags_ & kFinalizeIncrementalMarkingMask;
  }

  // Checks whether both the internal marking deque and the embedder-provided
  // one are empty. Avoid in fast path as it potentially calls through the API.
  bool MarkingDequesAreEmpty();

  void PreprocessStackTraces();

  // Checks whether a global GC is necessary
  GarbageCollector SelectGarbageCollector(AllocationSpace space,
                                          const char** reason);

  // Make sure there is a filler value behind the top of the new space
  // so that the GC does not confuse some uninitialized/stale memory
  // with the allocation memento of the object at the top
  void EnsureFillerObjectAtTop();

  // Ensure that we have swept all spaces in such a way that we can iterate
  // over all objects.  May cause a GC.
  void MakeHeapIterable();

  // Performs a garbage collection operation.
  // Returns whether there is a chance that another major GC could
  // collect more garbage.
  bool CollectGarbage(
      GarbageCollector collector, GarbageCollectionReason gc_reason,
      const char* collector_reason,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

  // Performs garbage collection.
  // Returns whether there is a chance another major GC could
  // collect more garbage.
  bool PerformGarbageCollection(
      GarbageCollector collector,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

  inline void UpdateOldSpaceLimits();

  // Initializes a JSObject based on its map.
  void InitializeJSObjectFromMap(JSObject* obj, FixedArray* properties,
                                 Map* map);

  // Initializes JSObject body starting at given offset.
  void InitializeJSObjectBody(JSObject* obj, Map* map, int start_offset);

  void InitializeAllocationMemento(AllocationMemento* memento,
                                   AllocationSite* allocation_site);

  bool CreateInitialMaps();
  void CreateInitialObjects();

  // These two Create*EntryStub functions are here and forced to not be inlined
  // because of a gcc-4.4 bug that assigns wrong vtable entries.
  NO_INLINE(void CreateJSEntryStub());
  NO_INLINE(void CreateJSConstructEntryStub());

  void CreateFixedStubs();

  HeapObject* DoubleAlignForDeserialization(HeapObject* object, int size);

  // Commits from space if it is uncommitted.
  void EnsureFromSpaceIsCommitted();

  // Uncommit unused semi space.
  bool UncommitFromSpace();

  // Fill in bogus values in from space
  void ZapFromSpace();

  // Deopts all code that contains allocation instructions which are tenured
  // or not tenured. Moreover, it clears the pretenuring allocation site
  // statistics.
  void ResetAllAllocationSitesDependentCode(PretenureFlag flag);

  // Evaluates local pretenuring for the old space and calls
  // ResetAllTenuredAllocationSitesDependentCode if too many objects died in
  // the old space.
  void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc);

  // Record statistics before and after garbage collection.
  void ReportStatisticsBeforeGC();
  void ReportStatisticsAfterGC();

  // Creates and installs the full-sized number string cache.
  int FullSizeNumberStringCacheLength();
  // Flush the number to string cache.
  void FlushNumberStringCache();

  void ConfigureInitialOldGenerationSize();

  bool HasLowYoungGenerationAllocationRate();
  bool HasLowOldGenerationAllocationRate();
  double YoungGenerationMutatorUtilization();
  double OldGenerationMutatorUtilization();

  void ReduceNewSpaceSize();

  GCIdleTimeHeapState ComputeHeapState();

  bool PerformIdleTimeAction(GCIdleTimeAction action,
                             GCIdleTimeHeapState heap_state,
                             double deadline_in_ms);

  void IdleNotificationEpilogue(GCIdleTimeAction action,
                                GCIdleTimeHeapState heap_state, double start_ms,
                                double deadline_in_ms);

  inline void UpdateAllocationsHash(HeapObject* object);
  inline void UpdateAllocationsHash(uint32_t value);
  void PrintAlloctionsHash();

  void AddToRingBuffer(const char* string);
  void GetFromRingBuffer(char* buffer);

  void CompactRetainedMaps(ArrayList* retained_maps);

  void CollectGarbageOnMemoryPressure();

  // Attempt to over-approximate the weak closure by marking object groups and
  // implicit references from global handles, but don't atomically complete
  // marking. If we continue to mark incrementally, we might have marked
  // objects that die later.
  void FinalizeIncrementalMarking(GarbageCollectionReason gc_reason);

  // Returns the timer used for a given GC type.
  // - GCScavenger: young generation GC
  // - GCCompactor: full GC
  // - GCFinalizeMC: finalization of incremental full GC
  // - GCFinalizeMCReduceMemory: finalization of incremental full GC with
  // memory reduction
  HistogramTimer* GCTypeTimer(GarbageCollector collector);

  // ===========================================================================
  // Pretenuring. ==============================================================
  // ===========================================================================

  // Pretenuring decisions are made based on feedback collected during new space
  // evacuation. Note that between feedback collection and calling this method
  // objects in old space must not move.
  void ProcessPretenuringFeedback();

  // ===========================================================================
  // Actual GC. ================================================================
  // ===========================================================================

  // Code that should be run before and after each GC.  Includes some
  // reporting/verification activities when compiled with DEBUG set.
  void GarbageCollectionPrologue();
  void GarbageCollectionEpilogue();

  // Performs a major collection in the whole heap.
  void MarkCompact();
  // Performs a minor collection of just the young generation.
  void MinorMarkCompact();

  // Code to be run before and after mark-compact.
  void MarkCompactPrologue();
  void MarkCompactEpilogue();

  // Performs a minor collection in new generation.
  void Scavenge();

  Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);

  void UpdateNewSpaceReferencesInExternalStringTable(
      ExternalStringTableUpdaterCallback updater_func);

  void UpdateReferencesInExternalStringTable(
      ExternalStringTableUpdaterCallback updater_func);

  void ProcessAllWeakReferences(WeakObjectRetainer* retainer);
  void ProcessYoungWeakReferences(WeakObjectRetainer* retainer);
  void ProcessNativeContexts(WeakObjectRetainer* retainer);
  void ProcessAllocationSites(WeakObjectRetainer* retainer);
  void ProcessWeakListRoots(WeakObjectRetainer* retainer);

  // ===========================================================================
  // GC statistics. ============================================================
  // ===========================================================================

  inline size_t OldGenerationSpaceAvailable() {
    if (old_generation_allocation_limit_ <= PromotedTotalSize()) return 0;
    return old_generation_allocation_limit_ -
           static_cast<size_t>(PromotedTotalSize());
  }

  // We allow incremental marking to overshoot the allocation limit for
  // performance reasons. If the overshoot is too large then we are more
  // eager to finalize incremental marking.
  inline bool AllocationLimitOvershotByLargeMargin() {
    // This guards against too eager finalization in small heaps.
    // The number is chosen based on v8.browsing_mobile on Nexus 7v2.
    size_t kMarginForSmallHeaps = 32u * MB;
    if (old_generation_allocation_limit_ >= PromotedTotalSize()) return false;
    uint64_t overshoot = PromotedTotalSize() - old_generation_allocation_limit_;
    // Overshoot margin is 50% of allocation limit or half-way to the max heap
    // with special handling of small heaps.
    uint64_t margin =
        Min(Max(old_generation_allocation_limit_ / 2, kMarginForSmallHeaps),
            (max_old_generation_size_ - old_generation_allocation_limit_) / 2);
    return overshoot >= margin;
  }
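
  // Worked example (illustrative numbers): with a 40 MB allocation limit and
  // a 512 MB max old generation, the margin is
  // Min(Max(20 MB, 32 MB), (512 - 40) / 2 MB) = Min(32 MB, 236 MB) = 32 MB,
  // so marking is considered badly overshot once the promoted total exceeds
  // 40 + 32 = 72 MB.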

  void UpdateTotalGCTime(double duration);

  bool MaximumSizeScavenge() { return maximum_size_scavenges_ > 0; }

  // ===========================================================================
  // Growing strategy. =========================================================
  // ===========================================================================

  // Decrease the allocation limit if the new limit based on the given
  // parameters is lower than the current limit.
  void DampenOldGenerationAllocationLimit(size_t old_gen_size, double gc_speed,
                                          double mutator_speed);

  // Calculates the allocation limit based on a given growing factor and a
  // given old generation size.
  size_t CalculateOldGenerationAllocationLimit(double factor,
                                               size_t old_gen_size);

  // Sets the allocation limit to trigger the next full garbage collection.
  void SetOldGenerationAllocationLimit(size_t old_gen_size, double gc_speed,
                                       double mutator_speed);

  size_t MinimumAllocationLimitGrowingStep();

  size_t old_generation_allocation_limit() const {
    return old_generation_allocation_limit_;
  }

  bool always_allocate() { return always_allocate_scope_count_.Value() != 0; }

  bool CanExpandOldGeneration(int size) {
    if (force_oom_) return false;
    return (OldGenerationCapacity() + size) < MaxOldGenerationSize();
  }

  bool IsCloseToOutOfMemory(size_t slack) {
    return OldGenerationCapacity() + slack >= MaxOldGenerationSize();
  }

  bool ShouldExpandOldGenerationOnSlowAllocation();

  enum class IncrementalMarkingLimit { kNoLimit, kSoftLimit, kHardLimit };
  IncrementalMarkingLimit IncrementalMarkingLimitReached();

  // ===========================================================================
  // Idle notification. ========================================================
  // ===========================================================================

  bool RecentIdleNotificationHappened();
  void ScheduleIdleScavengeIfNeeded(int bytes_allocated);

  // ===========================================================================
  // HeapIterator helpers. =====================================================
  // ===========================================================================

  void heap_iterator_start() { heap_iterator_depth_++; }

  void heap_iterator_end() { heap_iterator_depth_--; }

  bool in_heap_iterator() { return heap_iterator_depth_ > 0; }

  // ===========================================================================
  // Allocation methods. =======================================================
  // ===========================================================================

  // Returns a deep copy of the JavaScript object.
  // Properties and elements are copied too.
  // Optionally takes an AllocationSite to be appended in an AllocationMemento.
  MUST_USE_RESULT AllocationResult CopyJSObject(JSObject* source,
                                                AllocationSite* site = NULL);

  // Allocates a JS Map in the heap.
  MUST_USE_RESULT AllocationResult
  AllocateMap(InstanceType instance_type, int instance_size,
              ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND);

  // Allocates and initializes a new JavaScript object based on a
  // constructor.
  // If allocation_site is non-null, then a memento is emitted after the object
  // that points to the site.
  MUST_USE_RESULT AllocationResult AllocateJSObject(
      JSFunction* constructor, PretenureFlag pretenure = NOT_TENURED,
      AllocationSite* allocation_site = NULL);

  // Allocates and initializes a new JavaScript object based on a map.
  // Passing an allocation site means that a memento will be created that
  // points to the site.
  MUST_USE_RESULT AllocationResult
  AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure = NOT_TENURED,
                          AllocationSite* allocation_site = NULL);

  // Allocates a HeapNumber from value.
  MUST_USE_RESULT AllocationResult
  AllocateHeapNumber(double value, MutableMode mode = IMMUTABLE,
                     PretenureFlag pretenure = NOT_TENURED);

// Allocates SIMD values from the given lane values.
#define SIMD_ALLOCATE_DECLARATION(TYPE, Type, type, lane_count, lane_type) \
  AllocationResult Allocate##Type(lane_type lanes[lane_count],             \
                                  PretenureFlag pretenure = NOT_TENURED);
  SIMD128_TYPES(SIMD_ALLOCATE_DECLARATION)
#undef SIMD_ALLOCATE_DECLARATION

  // Allocates a byte array of the specified length
  MUST_USE_RESULT AllocationResult
  AllocateByteArray(int length, PretenureFlag pretenure = NOT_TENURED);

  // Allocates a bytecode array with given contents.
  MUST_USE_RESULT AllocationResult
  AllocateBytecodeArray(int length, const byte* raw_bytecodes, int frame_size,
                        int parameter_count, FixedArray* constant_pool);

  MUST_USE_RESULT AllocationResult CopyCode(Code* code);

  MUST_USE_RESULT AllocationResult
  CopyBytecodeArray(BytecodeArray* bytecode_array);

  // Allocates a fixed array initialized with undefined values
  MUST_USE_RESULT AllocationResult
  AllocateFixedArray(int length, PretenureFlag pretenure = NOT_TENURED);

  // Allocate an uninitialized object.  The memory is non-executable if the
  // hardware and OS allow.  This is the single choke-point for allocations
  // performed by the runtime and should not be bypassed (to extend this to
  // inlined allocations, use the Heap::DisableInlineAllocation() support).
  MUST_USE_RESULT inline AllocationResult AllocateRaw(
      int size_in_bytes, AllocationSpace space,
      AllocationAlignment alignment = kWordAligned);
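
  // Call-site sketch (illustrative of the AllocationResult protocol used
  // throughout this section; retrying after failure is the caller's job):
  //
  //   HeapObject* result = nullptr;
  //   AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
  //   if (!allocation.To(&result)) return allocation;  // retry space is full
  //   // ... set the object's map before the next allocation can happen ...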

  // Allocates a heap object based on the map.
  MUST_USE_RESULT AllocationResult
      Allocate(Map* map, AllocationSpace space,
               AllocationSite* allocation_site = NULL);

  // Allocates a partial map for bootstrapping.
  MUST_USE_RESULT AllocationResult
      AllocatePartialMap(InstanceType instance_type, int instance_size);

  // Allocate a block of memory in the given space (filled with a filler).
  // Used as a fall-back for generated code when the space is full.
  MUST_USE_RESULT AllocationResult
      AllocateFillerObject(int size, bool double_align, AllocationSpace space);

  // Allocate an uninitialized fixed array.
  MUST_USE_RESULT AllocationResult
      AllocateRawFixedArray(int length, PretenureFlag pretenure);

  // Allocate an uninitialized fixed double array.
  MUST_USE_RESULT AllocationResult
      AllocateRawFixedDoubleArray(int length, PretenureFlag pretenure);

  // Allocate an initialized fixed array with the given filler value.
  MUST_USE_RESULT AllocationResult
      AllocateFixedArrayWithFiller(int length, PretenureFlag pretenure,
                                   Object* filler);

  // Allocates and partially initializes a String.  There are two String
  // encodings: one-byte and two-byte.  These functions allocate a string of
  // the given length and set its map and length fields.  The characters of
  // the string are uninitialized.
  MUST_USE_RESULT AllocationResult
      AllocateRawOneByteString(int length, PretenureFlag pretenure);
  MUST_USE_RESULT AllocationResult
      AllocateRawTwoByteString(int length, PretenureFlag pretenure);

  // Allocates an internalized string in old space based on the character
  // stream.
  MUST_USE_RESULT inline AllocationResult AllocateInternalizedStringFromUtf8(
      Vector<const char> str, int chars, uint32_t hash_field);

  MUST_USE_RESULT inline AllocationResult AllocateOneByteInternalizedString(
      Vector<const uint8_t> str, uint32_t hash_field);

  MUST_USE_RESULT inline AllocationResult AllocateTwoByteInternalizedString(
      Vector<const uc16> str, uint32_t hash_field);

  template <bool is_one_byte, typename T>
  MUST_USE_RESULT AllocationResult
      AllocateInternalizedStringImpl(T t, int chars, uint32_t hash_field);

  template <typename T>
  MUST_USE_RESULT inline AllocationResult AllocateInternalizedStringImpl(
      T t, int chars, uint32_t hash_field);

  // Allocates an uninitialized fixed array. It must be filled by the caller.
  MUST_USE_RESULT AllocationResult AllocateUninitializedFixedArray(int length);

  // Make a copy of src and return it.
  MUST_USE_RESULT inline AllocationResult CopyFixedArray(FixedArray* src);

  // Make a copy of src, also grow the copy, and return the copy.
  MUST_USE_RESULT AllocationResult
  CopyFixedArrayAndGrow(FixedArray* src, int grow_by, PretenureFlag pretenure);

  // Make a copy of the first new_len elements of src and return it.
  MUST_USE_RESULT AllocationResult CopyFixedArrayUpTo(FixedArray* src,
                                                      int new_len,
                                                      PretenureFlag pretenure);

  // Make a copy of src, set the map, and return the copy.
  MUST_USE_RESULT AllocationResult
      CopyFixedArrayWithMap(FixedArray* src, Map* map);

  // Make a copy of src and return it.
  MUST_USE_RESULT inline AllocationResult CopyFixedDoubleArray(
      FixedDoubleArray* src);

  // Computes a single character string where the character has the given code.
  // A cache is used for one-byte (Latin1) codes.
  MUST_USE_RESULT AllocationResult
      LookupSingleCharacterStringFromCode(uint16_t code);

  // Allocate a symbol in old space.
  MUST_USE_RESULT AllocationResult AllocateSymbol();

  // Allocates an external array of the specified length and type.
  MUST_USE_RESULT AllocationResult AllocateFixedTypedArrayWithExternalPointer(
      int length, ExternalArrayType array_type, void* external_pointer,
      PretenureFlag pretenure);

  // Allocates a fixed typed array of the specified length and type.
  MUST_USE_RESULT AllocationResult
  AllocateFixedTypedArray(int length, ExternalArrayType array_type,
                          bool initialize, PretenureFlag pretenure);

  // Make a copy of src and return it.
  MUST_USE_RESULT AllocationResult CopyAndTenureFixedCOWArray(FixedArray* src);

  // Make a copy of src, set the map, and return the copy.
  MUST_USE_RESULT AllocationResult
      CopyFixedDoubleArrayWithMap(FixedDoubleArray* src, Map* map);

  // Allocates a fixed double array with uninitialized values.
  MUST_USE_RESULT AllocationResult AllocateUninitializedFixedDoubleArray(
      int length, PretenureFlag pretenure = NOT_TENURED);

  // Allocate empty fixed array.
  MUST_USE_RESULT AllocationResult AllocateEmptyFixedArray();

  // Allocate empty scope info.
  MUST_USE_RESULT AllocationResult AllocateEmptyScopeInfo();

  // Allocate empty fixed typed array of given type.
  MUST_USE_RESULT AllocationResult
      AllocateEmptyFixedTypedArray(ExternalArrayType array_type);

  // Allocate a tenured simple cell.
  MUST_USE_RESULT AllocationResult AllocateCell(Object* value);

  // Allocate a tenured JS global property cell initialized with the hole.
  MUST_USE_RESULT AllocationResult AllocatePropertyCell();

  MUST_USE_RESULT AllocationResult AllocateWeakCell(HeapObject* value);

  MUST_USE_RESULT AllocationResult AllocateTransitionArray(int capacity);

  // Allocates a new utility object in the old generation.
  MUST_USE_RESULT AllocationResult AllocateStruct(InstanceType type);

  // Allocates a new foreign object.
  MUST_USE_RESULT AllocationResult
      AllocateForeign(Address address, PretenureFlag pretenure = NOT_TENURED);

  MUST_USE_RESULT AllocationResult
      AllocateCode(int object_size, bool immovable);

  MUST_USE_RESULT AllocationResult InternalizeStringWithKey(HashTableKey* key);

  MUST_USE_RESULT AllocationResult InternalizeString(String* str);

  // ===========================================================================

  void set_force_oom(bool value) { force_oom_ = value; }

  // The amount of external memory registered through the API.
  int64_t external_memory_;

  // The limit at which to trigger memory pressure from the API.
  int64_t external_memory_limit_;

  // Caches the amount of external memory registered at the last MC.
  int64_t external_memory_at_last_mark_compact_;

  // The amount of memory that has been freed concurrently.
  base::AtomicNumber<intptr_t> external_memory_concurrently_freed_;

  // This can be calculated directly from a pointer to the heap; however, it is
  // more expedient to get at the isolate directly from within Heap methods.
  Isolate* isolate_;

  Object* roots_[kRootListLength];

  size_t code_range_size_;
  size_t max_semi_space_size_;
  size_t initial_semispace_size_;
  size_t max_old_generation_size_;
  size_t initial_old_generation_size_;
  bool old_generation_size_configured_;
  size_t max_executable_size_;
  size_t maximum_committed_;

  // For keeping track of how much data has survived
  // scavenge since last new space expansion.
  size_t survived_since_last_expansion_;

  // ... and since the last scavenge.
  size_t survived_last_scavenge_;

  // This is not the depth of nested AlwaysAllocateScope's but rather a single
  // count, as scopes can be acquired from multiple tasks (read: threads).
  base::AtomicNumber<size_t> always_allocate_scope_count_;

  // Stores the memory pressure level that is set by MemoryPressureNotification
  // and reset by a mark-compact garbage collection.
  base::AtomicValue<MemoryPressureLevel> memory_pressure_level_;

  // For keeping track of context disposals.
  int contexts_disposed_;

  // The length of the retained_maps array at the time of context disposal.
  // This separates maps in the retained_maps array that were created before
  // and after context disposal.
  int number_of_disposed_maps_;

  int global_ic_age_;

  NewSpace* new_space_;
  OldSpace* old_space_;
  OldSpace* code_space_;
  MapSpace* map_space_;
  LargeObjectSpace* lo_space_;
  // Map from the space id to the space.
  Space* space_[LAST_SPACE + 1];
  HeapState gc_state_;
  int gc_post_processing_depth_;
  Address new_space_top_after_last_gc_;

  // Returns the amount of external memory registered since last global gc.
  uint64_t PromotedExternalMemorySize();

  // How many "runtime allocations" happened.
  uint32_t allocations_count_;

  // Running hash over allocations performed.
  uint32_t raw_allocations_hash_;

  // How many mark-sweep collections happened.
  unsigned int ms_count_;

  // How many GCs happened.
  unsigned int gc_count_;

  // For post mortem debugging.
  int remembered_unmapped_pages_index_;
  Address remembered_unmapped_pages_[kRememberedUnmappedPages];

#ifdef DEBUG
  // If the --gc-interval flag is set to a positive value, this
  // variable holds the value indicating the number of allocations
  // that remain until the next failure and garbage collection.
  int allocation_timeout_;
#endif  // DEBUG

  // Limit that triggers a global GC on the next (normally caused) GC.  This
  // is checked when we have already decided to do a GC to help determine
  // which collector to invoke, before expanding a paged space in the old
  // generation and on every allocation in large object space.
  size_t old_generation_allocation_limit_;

  // Indicates that inline bump-pointer allocation has been globally disabled
  // for all spaces. This is used to disable allocations in generated code.
  bool inline_allocation_disabled_;

  // Weak list heads, threaded through the objects.
  // List heads are initialized lazily and contain the undefined_value at start.
  Object* native_contexts_list_;
  Object* allocation_sites_list_;

  // List of encountered weak collections (JSWeakMap and JSWeakSet) during
  // marking. It is initialized during marking, destroyed after marking and
  // contains Smi(0) while marking is not active.
  Object* encountered_weak_collections_;

  Object* encountered_weak_cells_;

  Object* encountered_transition_arrays_;

  List<GCCallbackPair> gc_epilogue_callbacks_;
  List<GCCallbackPair> gc_prologue_callbacks_;

  // Total RegExp code ever generated.
  double total_regexp_code_generated_;

  int deferred_counters_[v8::Isolate::kUseCounterFeatureCount];

  GCTracer* tracer_;

  size_t promoted_objects_size_;
  double promotion_ratio_;
  double promotion_rate_;
  size_t semi_space_copied_object_size_;
  size_t previous_semi_space_copied_object_size_;
  double semi_space_copied_rate_;
  int nodes_died_in_new_space_;
  int nodes_copied_in_new_space_;
  int nodes_promoted_;

  // This is the pretenuring trigger for allocation sites that are in maybe
  // tenure state. When we have switched to the maximum new space size we
  // deoptimize the code that belongs to the allocation site and derive the
  // lifetime of the allocation site.
  unsigned int maximum_size_scavenges_;

  // Total time spent in GC.
  double total_gc_time_ms_;

  // Last time an idle notification happened.
  double last_idle_notification_time_;

  // Last time a garbage collection happened.
  double last_gc_time_;

  Scavenger* scavenge_collector_;

  MarkCompactCollector* mark_compact_collector_;

  MemoryAllocator* memory_allocator_;

  StoreBuffer* store_buffer_;

  IncrementalMarking* incremental_marking_;

  GCIdleTimeHandler* gc_idle_time_handler_;

  MemoryReducer* memory_reducer_;

  ObjectStats* live_object_stats_;
  ObjectStats* dead_object_stats_;

  ScavengeJob* scavenge_job_;

  AllocationObserver* idle_scavenge_observer_;

  // These two counters are monotonically increasing and never reset.
  size_t full_codegen_bytes_generated_;
  size_t crankshaft_codegen_bytes_generated_;

  // This counter is increased before each GC and never reset.
  // To account for the bytes allocated since the last GC, use the
  // NewSpaceAllocationCounter() function.
  size_t new_space_allocation_counter_;

  // This counter is increased before each GC and never reset. To
  // account for the bytes allocated since the last GC, use the
  // OldGenerationAllocationCounter() function.
  size_t old_generation_allocation_counter_at_last_gc_;

  // The size of objects in old generation after the last MarkCompact GC.
  size_t old_generation_size_at_last_gc_;

  // If the --deopt_every_n_garbage_collections flag is set to a positive value,
  // this variable holds the number of garbage collections since the last
  // deoptimization triggered by garbage collection.
  int gcs_since_last_deopt_;

  // The feedback storage is used to store allocation sites (keys) and how often
  // they have been visited (values) by finding a memento behind an object. The
  // storage is only alive temporarily during a GC. The invariant is that all
  // pointers in this map are already fixed, i.e., they do not point to
  // forwarding pointers.
  base::HashMap* global_pretenuring_feedback_;

  char trace_ring_buffer_[kTraceRingBufferSize];
  // If it's not full then the data is from 0 to ring_buffer_end_.  If it's
  // full then the data is from ring_buffer_end_ to the end of the buffer and
  // from 0 to ring_buffer_end_.
  bool ring_buffer_full_;
  size_t ring_buffer_end_;

  // Shared state read by the scavenge collector and set by ScavengeObject.
  PromotionQueue promotion_queue_;

  // Flag is set when the heap has been configured.  The heap can be repeatedly
  // configured through the API until it is set up.
  bool configured_;

  // Currently set GC flags that are respected by all GC components.
  int current_gc_flags_;

  // Currently set GC callback flags that are used to pass information between
  // the embedder and V8's GC.
  GCCallbackFlags current_gc_callback_flags_;

  ExternalStringTable external_string_table_;

  base::Mutex relocation_mutex_;

  int gc_callbacks_depth_;

  bool deserialization_complete_;

  StrongRootsList* strong_roots_list_;

  // The depth of HeapIterator nestings.
  int heap_iterator_depth_;

  EmbedderHeapTracer* embedder_heap_tracer_;
  std::vector<std::pair<void*, void*>> wrappers_to_trace_;

  // Used for testing purposes.
  bool force_oom_;
  bool delay_sweeper_tasks_for_testing_;

  // Classes in "heap" can be friends.
  friend class AlwaysAllocateScope;
  friend class GCCallbacksScope;
  friend class GCTracer;
  friend class HeapIterator;
  friend class IdleScavengeObserver;
  friend class IncrementalMarking;
  friend class IncrementalMarkingJob;
  friend class LargeObjectSpace;
  friend class MarkCompactCollector;
  friend class MarkCompactMarkingVisitor;
  friend class NewSpace;
  friend class ObjectStatsCollector;
  friend class Page;
  friend class PagedSpace;
  friend class Scavenger;
  friend class StoreBuffer;
  friend class TestMemoryAllocatorScope;

  // The allocator interface.
  friend class Factory;

  // The Isolate constructs us.
  friend class Isolate;

  // Used in cctest.
  friend class HeapTester;

  DISALLOW_COPY_AND_ASSIGN(Heap);
};


class HeapStats {
 public:
  static const int kStartMarker = 0xDECADE00;
  static const int kEndMarker = 0xDECADE01;

  intptr_t* start_marker;                  //  0
  size_t* new_space_size;                  //  1
  size_t* new_space_capacity;              //  2
  size_t* old_space_size;                  //  3
  size_t* old_space_capacity;              //  4
  size_t* code_space_size;                 //  5
  size_t* code_space_capacity;             //  6
  size_t* map_space_size;                  //  7
  size_t* map_space_capacity;              //  8
  size_t* lo_space_size;                   //  9
  size_t* global_handle_count;             // 10
  size_t* weak_global_handle_count;        // 11
  size_t* pending_global_handle_count;     // 12
  size_t* near_death_global_handle_count;  // 13
  size_t* free_global_handle_count;        // 14
  size_t* memory_allocator_size;           // 15
  size_t* memory_allocator_capacity;       // 16
  size_t* malloced_memory;                 // 17
  size_t* malloced_peak_memory;            // 18
  size_t* objects_per_type;                // 19
  size_t* size_per_type;                   // 20
  int* os_error;                           // 21
  char* last_few_messages;                 // 22
  char* js_stacktrace;                     // 23
  intptr_t* end_marker;                    // 24
};
2411 
2412 
2413 class AlwaysAllocateScope {
2414  public:
2415   explicit inline AlwaysAllocateScope(Isolate* isolate);
2416   inline ~AlwaysAllocateScope();
2417 
2418  private:
2419   Heap* heap_;
2420 };
2421 
2422 
2423 // Visitor class to verify interior pointers in spaces that do not contain
2424 // or care about intergenerational references. All heap object pointers have to
2425 // point into the heap to a location that has a map pointer at its first word.
2426 // Caveat: Heap::Contains is an approximation because it can return true for
2427 // objects in a heap space but above the allocation pointer.
2428 class VerifyPointersVisitor : public ObjectVisitor {
2429  public:
2430   inline void VisitPointers(Object** start, Object** end) override;
2431 };


// Verify that all objects are Smis.
class VerifySmisVisitor : public ObjectVisitor {
 public:
  inline void VisitPointers(Object** start, Object** end) override;
};
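
// Usage sketch (assumed context: a debug-time verification pass): point one
// of the visitors above at a set of slots, e.g. via the root iterator.
//
//   VerifyPointersVisitor visitor;
//   heap->IterateRoots(&visitor, VISIT_ONLY_STRONG);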


// Space iterator for iterating over all spaces of the heap.  Returns each space
// in turn, and null when it is done.
class AllSpaces BASE_EMBEDDED {
 public:
  explicit AllSpaces(Heap* heap) : heap_(heap), counter_(FIRST_SPACE) {}
  Space* next();

 private:
  Heap* heap_;
  int counter_;
};
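
// Usage sketch: the protocol is a plain next() loop that ends on null.
//
//   AllSpaces spaces(heap);
//   for (Space* space = spaces.next(); space != NULL;
//        space = spaces.next()) {
//     // ... inspect *space ...
//   }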


// Space iterator for iterating over all old spaces of the heap: Old space
// and code space.  Returns each space in turn, and null when it is done.
class V8_EXPORT_PRIVATE OldSpaces BASE_EMBEDDED {
 public:
  explicit OldSpaces(Heap* heap) : heap_(heap), counter_(OLD_SPACE) {}
  OldSpace* next();

 private:
  Heap* heap_;
  int counter_;
};


// Space iterator for iterating over all the paged spaces of the heap: Map
// space, old space and code space.  Returns each space in turn, and null when
// it is done.
class PagedSpaces BASE_EMBEDDED {
 public:
  explicit PagedSpaces(Heap* heap) : heap_(heap), counter_(OLD_SPACE) {}
  PagedSpace* next();

 private:
  Heap* heap_;
  int counter_;
};


class SpaceIterator : public Malloced {
 public:
  explicit SpaceIterator(Heap* heap);
  virtual ~SpaceIterator();

  bool has_next();
  Space* next();

 private:
  Heap* heap_;
  int current_space_;         // from enum AllocationSpace.
};
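
// Usage sketch: unlike the fixed iterators above, SpaceIterator exposes an
// explicit has_next()/next() protocol.
//
//   SpaceIterator it(heap);
//   while (it.has_next()) {
//     Space* space = it.next();
//     // ... inspect *space ...
//   }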


// A HeapIterator provides iteration over the whole heap. It aggregates the
// specific iterators for the different spaces, as each of those can iterate
// over only one space.
//
// HeapIterator ensures there is no allocation during its lifetime (using an
// embedded DisallowHeapAllocation instance).
//
// HeapIterator can skip free list nodes (that is, de-allocated heap objects
// that still remain in the heap). Because the implementation of free-node
// filtering uses GC marks, it can't be used during mark-sweep/mark-compact GC
// phases. Also, it is forbidden to interrupt iteration in this mode, as this
// would leave heap objects marked (and thus, unusable).
class HeapIterator BASE_EMBEDDED {
 public:
  enum HeapObjectsFiltering { kNoFiltering, kFilterUnreachable };

  explicit HeapIterator(Heap* heap,
                        HeapObjectsFiltering filtering = kNoFiltering);
  ~HeapIterator();

  HeapObject* next();

 private:
  struct MakeHeapIterableHelper {
    explicit MakeHeapIterableHelper(Heap* heap) { heap->MakeHeapIterable(); }
  };

  HeapObject* NextObject();

  // The following two fields need to be declared in this order. Initialization
  // order guarantees that we first make the heap iterable (which may involve
  // allocations) and only then lock it down by not allowing further
  // allocations.
  MakeHeapIterableHelper make_heap_iterable_helper_;
  DisallowHeapAllocation no_heap_allocation_;

  Heap* heap_;
  HeapObjectsFiltering filtering_;
  HeapObjectsFilter* filter_;
  // Space iterator for iterating all the spaces.
  SpaceIterator* space_iterator_;
  // Object iterator for the space currently being iterated.
  std::unique_ptr<ObjectIterator> object_iterator_;
};
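
// Usage sketch: a whole-heap walk ends when next() returns null. With
// kFilterUnreachable the loop must run to completion (see above).
//
//   HeapIterator iterator(heap);
//   for (HeapObject* obj = iterator.next(); obj != NULL;
//        obj = iterator.next()) {
//     // ... inspect *obj; no heap allocation is allowed here ...
//   }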

// Abstract base class for checking whether a weak object should be retained.
class WeakObjectRetainer {
 public:
  virtual ~WeakObjectRetainer() {}

  // Returns the object that should be retained, or NULL if the object has no
  // remaining references and should not be retained. The returned address may
  // differ from the one passed in, as in some GC situations the object has
  // been moved.
  virtual Object* RetainAs(Object* object) = 0;
};
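
// Implementation sketch (hypothetical subclass): a retainer that keeps every
// object alive, returning it at its possibly-updated address.
//
//   class RetainEverything final : public WeakObjectRetainer {
//    public:
//     Object* RetainAs(Object* object) override { return object; }
//   };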


#ifdef DEBUG
// Helper class for tracing paths to a search target Object from all roots.
// The TracePathFrom() method can be used to trace paths from a specific
// object to the search target object.
class PathTracer : public ObjectVisitor {
 public:
  enum WhatToFind {
    FIND_ALL,   // Will find all matches.
    FIND_FIRST  // Will stop the search after the first match.
  };

  // Tags 0, 1, and 3 are used. Tag 2 is used for marking visited HeapObjects.
  static const int kMarkTag = 2;

  // For the WhatToFind arg, if FIND_FIRST is specified, tracing will stop
  // after the first match.  If FIND_ALL is specified, then tracing will be
  // done for all matches.
  PathTracer(Object* search_target, WhatToFind what_to_find,
             VisitMode visit_mode)
      : search_target_(search_target),
        found_target_(false),
        found_target_in_trace_(false),
        what_to_find_(what_to_find),
        visit_mode_(visit_mode),
        object_stack_(20),
        no_allocation() {}

  void VisitPointers(Object** start, Object** end) override;

  void Reset();
  void TracePathFrom(Object** root);

  bool found() const { return found_target_; }

  static Object* const kAnyGlobalObject;

 protected:
  class MarkVisitor;
  class UnmarkVisitor;

  void MarkRecursively(Object** p, MarkVisitor* mark_visitor);
  void UnmarkRecursively(Object** p, UnmarkVisitor* unmark_visitor);
  virtual void ProcessResults();

  Object* search_target_;
  bool found_target_;
  bool found_target_in_trace_;
  WhatToFind what_to_find_;
  VisitMode visit_mode_;
  List<Object*> object_stack_;

  DisallowHeapAllocation no_allocation;  // i.e. no gc allowed.

 private:
  DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer);
};
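
// Usage sketch (debug builds only): trace how a target object is reachable
// from a given root slot.
//
//   PathTracer tracer(target, PathTracer::FIND_FIRST, VISIT_ALL);
//   tracer.TracePathFrom(&root_slot);
//   if (tracer.found()) {
//     // A path from root_slot to target exists.
//   }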
#endif  // DEBUG

// -----------------------------------------------------------------------------
// Allows observation of allocations.
class AllocationObserver {
 public:
  explicit AllocationObserver(intptr_t step_size)
      : step_size_(step_size), bytes_to_next_step_(step_size) {
    DCHECK(step_size >= kPointerSize);
  }
  virtual ~AllocationObserver() {}

  // Called each time the observed space does an allocation step. This may be
  // called more frequently than once per step_size bytes (e.g. when there are
  // multiple observers, or when a page or space boundary is encountered).
  void AllocationStep(int bytes_allocated, Address soon_object, size_t size) {
    bytes_to_next_step_ -= bytes_allocated;
    if (bytes_to_next_step_ <= 0) {
      Step(static_cast<int>(step_size_ - bytes_to_next_step_), soon_object,
           size);
      step_size_ = GetNextStepSize();
      bytes_to_next_step_ = step_size_;
    }
  }

 protected:
  intptr_t step_size() const { return step_size_; }
  intptr_t bytes_to_next_step() const { return bytes_to_next_step_; }

  // Pure virtual method provided by the subclasses that gets called when at
  // least step_size bytes have been allocated. soon_object is the address just
  // allocated (but not yet initialized). size is the size of the object as
  // requested (i.e. w/o the alignment fillers). Some complexities to be aware
  // of:
  // 1) soon_object will be nullptr in cases where we end up observing an
  //    allocation that happens to be a filler space (e.g. page boundaries).
  // 2) size is the requested size at the time of allocation. Right-trimming
  //    may change the object size dynamically.
  // 3) soon_object may actually be the first object in an allocation-folding
  //    group. In such a case size is the size of the group rather than the
  //    size of the first object.
  virtual void Step(int bytes_allocated, Address soon_object, size_t size) = 0;

  // Subclasses can override this method to make the step size dynamic.
  virtual intptr_t GetNextStepSize() { return step_size_; }

  intptr_t step_size_;
  intptr_t bytes_to_next_step_;

 private:
  friend class LargeObjectSpace;
  friend class NewSpace;
  friend class PagedSpace;
  DISALLOW_COPY_AND_ASSIGN(AllocationObserver);
};
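
// Implementation sketch (hypothetical subclass): report every ~1 MB of
// allocation in the observed space. PrintF and the MB constant are assumed
// to come from src/utils.h and src/globals.h respectively.
//
//   class LoggingObserver final : public AllocationObserver {
//    public:
//     LoggingObserver() : AllocationObserver(1 * MB) {}
//
//    protected:
//     void Step(int bytes_allocated, Address soon_object,
//               size_t size) override {
//       // soon_object may be nullptr when a filler triggered this step.
//       PrintF("Allocation step: %d bytes\n", bytes_allocated);
//     }
//   };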

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_HEAP_H_