// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_HEAP_H_
#define V8_HEAP_HEAP_H_

#include <cmath>
#include <map>

// Clients of this interface shouldn't depend on lots of heap internals.
// Do not include anything from src/heap here!
#include "src/allocation.h"
#include "src/assert-scope.h"
#include "src/atomic-utils.h"
#include "src/globals.h"
// TODO(mstarzinger): Two more includes to kill!
#include "src/heap/spaces.h"
#include "src/heap/store-buffer.h"
#include "src/list.h"

namespace v8 {
namespace internal {

// Defines all the roots in Heap.
#define STRONG_ROOT_LIST(V)                                                    \
  V(Map, byte_array_map, ByteArrayMap)                                         \
  V(Map, free_space_map, FreeSpaceMap)                                         \
  V(Map, one_pointer_filler_map, OnePointerFillerMap)                          \
  V(Map, two_pointer_filler_map, TwoPointerFillerMap)                          \
  /* Cluster the most popular ones in a few cache lines here at the top.    */ \
  V(Smi, store_buffer_top, StoreBufferTop)                                     \
  V(Oddball, undefined_value, UndefinedValue)                                  \
  V(Oddball, the_hole_value, TheHoleValue)                                     \
  V(Oddball, null_value, NullValue)                                            \
  V(Oddball, true_value, TrueValue)                                            \
  V(Oddball, false_value, FalseValue)                                          \
  V(String, empty_string, empty_string)                                        \
  V(String, hidden_string, hidden_string)                                      \
  V(Oddball, uninitialized_value, UninitializedValue)                          \
  V(Map, cell_map, CellMap)                                                    \
  V(Map, global_property_cell_map, GlobalPropertyCellMap)                      \
  V(Map, shared_function_info_map, SharedFunctionInfoMap)                      \
  V(Map, meta_map, MetaMap)                                                    \
  V(Map, heap_number_map, HeapNumberMap)                                       \
  V(Map, mutable_heap_number_map, MutableHeapNumberMap)                        \
  V(Map, float32x4_map, Float32x4Map)                                          \
  V(Map, int32x4_map, Int32x4Map)                                              \
  V(Map, uint32x4_map, Uint32x4Map)                                            \
  V(Map, bool32x4_map, Bool32x4Map)                                            \
  V(Map, int16x8_map, Int16x8Map)                                              \
  V(Map, uint16x8_map, Uint16x8Map)                                            \
  V(Map, bool16x8_map, Bool16x8Map)                                            \
  V(Map, int8x16_map, Int8x16Map)                                              \
  V(Map, uint8x16_map, Uint8x16Map)                                            \
  V(Map, bool8x16_map, Bool8x16Map)                                            \
  V(Map, native_context_map, NativeContextMap)                                 \
  V(Map, fixed_array_map, FixedArrayMap)                                       \
  V(Map, code_map, CodeMap)                                                    \
  V(Map, scope_info_map, ScopeInfoMap)                                         \
  V(Map, fixed_cow_array_map, FixedCOWArrayMap)                                \
  V(Map, fixed_double_array_map, FixedDoubleArrayMap)                          \
  V(Map, weak_cell_map, WeakCellMap)                                           \
  V(Map, transition_array_map, TransitionArrayMap)                             \
  V(Map, one_byte_string_map, OneByteStringMap)                                \
  V(Map, one_byte_internalized_string_map, OneByteInternalizedStringMap)       \
  V(Map, function_context_map, FunctionContextMap)                             \
  V(FixedArray, empty_fixed_array, EmptyFixedArray)                            \
  V(ByteArray, empty_byte_array, EmptyByteArray)                               \
  V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray)             \
  /* The roots above this line should be boring from a GC point of view.    */ \
  /* This means they are never in new space and never on a page that is     */ \
  /* being compacted.                                                       */ \
  V(Oddball, no_interceptor_result_sentinel, NoInterceptorResultSentinel)      \
  V(Oddball, arguments_marker, ArgumentsMarker)                                \
  V(Oddball, exception, Exception)                                             \
  V(Oddball, termination_exception, TerminationException)                      \
  V(FixedArray, number_string_cache, NumberStringCache)                        \
  V(Object, instanceof_cache_function, InstanceofCacheFunction)                \
  V(Object, instanceof_cache_map, InstanceofCacheMap)                          \
  V(Object, instanceof_cache_answer, InstanceofCacheAnswer)                    \
  V(FixedArray, single_character_string_cache, SingleCharacterStringCache)     \
  V(FixedArray, string_split_cache, StringSplitCache)                          \
  V(FixedArray, regexp_multiple_cache, RegExpMultipleCache)                    \
  V(Smi, hash_seed, HashSeed)                                                  \
  V(Map, hash_table_map, HashTableMap)                                         \
  V(Map, ordered_hash_table_map, OrderedHashTableMap)                          \
  V(Map, symbol_map, SymbolMap)                                                \
  V(Map, string_map, StringMap)                                                \
  V(Map, cons_one_byte_string_map, ConsOneByteStringMap)                       \
  V(Map, cons_string_map, ConsStringMap)                                       \
  V(Map, sliced_string_map, SlicedStringMap)                                   \
  V(Map, sliced_one_byte_string_map, SlicedOneByteStringMap)                   \
  V(Map, external_string_map, ExternalStringMap)                               \
  V(Map, external_string_with_one_byte_data_map,                               \
    ExternalStringWithOneByteDataMap)                                          \
  V(Map, external_one_byte_string_map, ExternalOneByteStringMap)               \
  V(Map, native_source_string_map, NativeSourceStringMap)                      \
  V(Map, short_external_string_map, ShortExternalStringMap)                    \
  V(Map, short_external_string_with_one_byte_data_map,                         \
    ShortExternalStringWithOneByteDataMap)                                     \
  V(Map, internalized_string_map, InternalizedStringMap)                       \
  V(Map, external_internalized_string_map, ExternalInternalizedStringMap)      \
  V(Map, external_internalized_string_with_one_byte_data_map,                  \
    ExternalInternalizedStringWithOneByteDataMap)                              \
  V(Map, external_one_byte_internalized_string_map,                            \
    ExternalOneByteInternalizedStringMap)                                      \
  V(Map, short_external_internalized_string_map,                               \
    ShortExternalInternalizedStringMap)                                        \
  V(Map, short_external_internalized_string_with_one_byte_data_map,            \
    ShortExternalInternalizedStringWithOneByteDataMap)                         \
  V(Map, short_external_one_byte_internalized_string_map,                      \
    ShortExternalOneByteInternalizedStringMap)                                 \
  V(Map, short_external_one_byte_string_map, ShortExternalOneByteStringMap)    \
  V(Map, fixed_uint8_array_map, FixedUint8ArrayMap)                            \
  V(Map, fixed_int8_array_map, FixedInt8ArrayMap)                              \
  V(Map, fixed_uint16_array_map, FixedUint16ArrayMap)                          \
  V(Map, fixed_int16_array_map, FixedInt16ArrayMap)                            \
  V(Map, fixed_uint32_array_map, FixedUint32ArrayMap)                          \
  V(Map, fixed_int32_array_map, FixedInt32ArrayMap)                            \
  V(Map, fixed_float32_array_map, FixedFloat32ArrayMap)                        \
  V(Map, fixed_float64_array_map, FixedFloat64ArrayMap)                        \
  V(Map, fixed_uint8_clamped_array_map, FixedUint8ClampedArrayMap)             \
  V(FixedTypedArrayBase, empty_fixed_uint8_array, EmptyFixedUint8Array)        \
  V(FixedTypedArrayBase, empty_fixed_int8_array, EmptyFixedInt8Array)          \
  V(FixedTypedArrayBase, empty_fixed_uint16_array, EmptyFixedUint16Array)      \
  V(FixedTypedArrayBase, empty_fixed_int16_array, EmptyFixedInt16Array)        \
  V(FixedTypedArrayBase, empty_fixed_uint32_array, EmptyFixedUint32Array)      \
  V(FixedTypedArrayBase, empty_fixed_int32_array, EmptyFixedInt32Array)        \
  V(FixedTypedArrayBase, empty_fixed_float32_array, EmptyFixedFloat32Array)    \
  V(FixedTypedArrayBase, empty_fixed_float64_array, EmptyFixedFloat64Array)    \
  V(FixedTypedArrayBase, empty_fixed_uint8_clamped_array,                      \
    EmptyFixedUint8ClampedArray)                                               \
  V(Map, sloppy_arguments_elements_map, SloppyArgumentsElementsMap)            \
  V(Map, catch_context_map, CatchContextMap)                                   \
  V(Map, with_context_map, WithContextMap)                                     \
  V(Map, block_context_map, BlockContextMap)                                   \
  V(Map, module_context_map, ModuleContextMap)                                 \
  V(Map, script_context_map, ScriptContextMap)                                 \
  V(Map, script_context_table_map, ScriptContextTableMap)                      \
  V(Map, undefined_map, UndefinedMap)                                          \
  V(Map, the_hole_map, TheHoleMap)                                             \
  V(Map, null_map, NullMap)                                                    \
  V(Map, boolean_map, BooleanMap)                                              \
  V(Map, uninitialized_map, UninitializedMap)                                  \
  V(Map, arguments_marker_map, ArgumentsMarkerMap)                             \
  V(Map, no_interceptor_result_sentinel_map, NoInterceptorResultSentinelMap)   \
  V(Map, exception_map, ExceptionMap)                                          \
  V(Map, termination_exception_map, TerminationExceptionMap)                   \
  V(Map, message_object_map, JSMessageObjectMap)                               \
  V(Map, foreign_map, ForeignMap)                                              \
  V(Map, neander_map, NeanderMap)                                              \
  V(Map, external_map, ExternalMap)                                            \
  V(HeapNumber, nan_value, NanValue)                                           \
  V(HeapNumber, infinity_value, InfinityValue)                                 \
  V(HeapNumber, minus_zero_value, MinusZeroValue)                              \
  V(HeapNumber, minus_infinity_value, MinusInfinityValue)                      \
  V(JSObject, message_listeners, MessageListeners)                             \
  V(UnseededNumberDictionary, code_stubs, CodeStubs)                           \
  V(UnseededNumberDictionary, non_monomorphic_cache, NonMonomorphicCache)      \
  V(PolymorphicCodeCache, polymorphic_code_cache, PolymorphicCodeCache)        \
  V(Code, js_entry_code, JsEntryCode)                                          \
  V(Code, js_construct_entry_code, JsConstructEntryCode)                       \
  V(FixedArray, natives_source_cache, NativesSourceCache)                      \
  V(FixedArray, experimental_natives_source_cache,                             \
    ExperimentalNativesSourceCache)                                            \
  V(FixedArray, extra_natives_source_cache, ExtraNativesSourceCache)           \
  V(FixedArray, experimental_extra_natives_source_cache,                       \
    ExperimentalExtraNativesSourceCache)                                       \
  V(Script, empty_script, EmptyScript)                                         \
  V(NameDictionary, intrinsic_function_names, IntrinsicFunctionNames)          \
  V(NameDictionary, empty_properties_dictionary, EmptyPropertiesDictionary)    \
  V(Cell, undefined_cell, UndefinedCell)                                       \
  V(JSObject, observation_state, ObservationState)                             \
  V(Object, symbol_registry, SymbolRegistry)                                   \
  V(Object, script_list, ScriptList)                                           \
  V(SeededNumberDictionary, empty_slow_element_dictionary,                     \
    EmptySlowElementDictionary)                                                \
  V(FixedArray, materialized_objects, MaterializedObjects)                     \
  V(FixedArray, microtask_queue, MicrotaskQueue)                               \
  V(TypeFeedbackVector, dummy_vector, DummyVector)                             \
  V(FixedArray, cleared_optimized_code_map, ClearedOptimizedCodeMap)           \
  V(FixedArray, detached_contexts, DetachedContexts)                           \
  V(ArrayList, retained_maps, RetainedMaps)                                    \
  V(WeakHashTable, weak_object_to_code_table, WeakObjectToCodeTable)           \
  V(PropertyCell, array_protector, ArrayProtector)                             \
  V(PropertyCell, empty_property_cell, EmptyPropertyCell)                      \
  V(Object, weak_stack_trace_list, WeakStackTraceList)                         \
  V(Object, noscript_shared_function_infos, NoScriptSharedFunctionInfos)       \
  V(FixedArray, interpreter_table, InterpreterTable)                           \
  V(Map, bytecode_array_map, BytecodeArrayMap)                                 \
  V(WeakCell, empty_weak_cell, EmptyWeakCell)                                  \
  V(BytecodeArray, empty_bytecode_array, EmptyBytecodeArray)


// Entries in this list are limited to Smis and are not visited during GC.
#define SMI_ROOT_LIST(V)                                                   \
  V(Smi, stack_limit, StackLimit)                                          \
  V(Smi, real_stack_limit, RealStackLimit)                                 \
  V(Smi, last_script_id, LastScriptId)                                     \
  V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset) \
  V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset)       \
  V(Smi, getter_stub_deopt_pc_offset, GetterStubDeoptPCOffset)             \
  V(Smi, setter_stub_deopt_pc_offset, SetterStubDeoptPCOffset)


#define ROOT_LIST(V)  \
  STRONG_ROOT_LIST(V) \
  SMI_ROOT_LIST(V)    \
  V(StringTable, string_table, StringTable)

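// Example (illustrative comment, not part of the original header): ROOT_LIST
// is an X-macro. A client defines its own V, instantiates the list once per
// kind of declaration it needs, and undefines V again. The Heap class itself
// does exactly this further below to declare one accessor per root:
//
//   #define ROOT_ACCESSOR(type, name, camel_name) inline type* name();
//   ROOT_LIST(ROOT_ACCESSOR)
//   #undef ROOT_ACCESSOR
//
// which expands to "inline Map* byte_array_map();",
// "inline Smi* hash_seed();", and so on for every entry above.
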
#define INTERNALIZED_STRING_LIST(V)                              \
  V(anonymous_string, "anonymous")                               \
  V(apply_string, "apply")                                       \
  V(assign_string, "assign")                                     \
  V(arguments_string, "arguments")                               \
  V(Arguments_string, "Arguments")                               \
  V(Array_string, "Array")                                       \
  V(bind_string, "bind")                                         \
  V(bool16x8_string, "bool16x8")                                 \
  V(Bool16x8_string, "Bool16x8")                                 \
  V(bool32x4_string, "bool32x4")                                 \
  V(Bool32x4_string, "Bool32x4")                                 \
  V(bool8x16_string, "bool8x16")                                 \
  V(Bool8x16_string, "Bool8x16")                                 \
  V(boolean_string, "boolean")                                   \
  V(Boolean_string, "Boolean")                                   \
  V(bound__string, "bound ")                                     \
  V(byte_length_string, "byteLength")                            \
  V(byte_offset_string, "byteOffset")                            \
  V(call_string, "call")                                         \
  V(callee_string, "callee")                                     \
  V(caller_string, "caller")                                     \
  V(cell_value_string, "%cell_value")                            \
  V(char_at_string, "CharAt")                                    \
  V(closure_string, "(closure)")                                 \
  V(compare_ic_string, "==")                                     \
  V(configurable_string, "configurable")                         \
  V(constructor_string, "constructor")                           \
  V(construct_string, "construct")                               \
  V(create_string, "create")                                     \
  V(Date_string, "Date")                                         \
  V(default_string, "default")                                   \
  V(defineProperty_string, "defineProperty")                     \
  V(deleteProperty_string, "deleteProperty")                     \
  V(display_name_string, "displayName")                          \
  V(done_string, "done")                                         \
  V(dot_result_string, ".result")                                \
  V(dot_string, ".")                                             \
  V(enumerable_string, "enumerable")                             \
  V(enumerate_string, "enumerate")                               \
  V(Error_string, "Error")                                       \
  V(eval_string, "eval")                                         \
  V(false_string, "false")                                       \
  V(float32x4_string, "float32x4")                               \
  V(Float32x4_string, "Float32x4")                               \
  V(for_api_string, "for_api")                                   \
  V(for_string, "for")                                           \
  V(function_string, "function")                                 \
  V(Function_string, "Function")                                 \
  V(Generator_string, "Generator")                               \
  V(getOwnPropertyDescriptor_string, "getOwnPropertyDescriptor") \
  V(getPrototypeOf_string, "getPrototypeOf")                     \
  V(get_string, "get")                                           \
  V(global_string, "global")                                     \
  V(has_string, "has")                                           \
  V(illegal_access_string, "illegal access")                     \
  V(illegal_argument_string, "illegal argument")                 \
  V(index_string, "index")                                       \
  V(infinity_string, "Infinity")                                 \
  V(input_string, "input")                                       \
  V(int16x8_string, "int16x8")                                   \
  V(Int16x8_string, "Int16x8")                                   \
  V(int32x4_string, "int32x4")                                   \
  V(Int32x4_string, "Int32x4")                                   \
  V(int8x16_string, "int8x16")                                   \
  V(Int8x16_string, "Int8x16")                                   \
  V(isExtensible_string, "isExtensible")                         \
  V(isView_string, "isView")                                     \
  V(KeyedLoadMonomorphic_string, "KeyedLoadMonomorphic")         \
  V(KeyedStoreMonomorphic_string, "KeyedStoreMonomorphic")       \
  V(last_index_string, "lastIndex")                              \
  V(length_string, "length")                                     \
  V(Map_string, "Map")                                           \
  V(minus_infinity_string, "-Infinity")                          \
  V(minus_zero_string, "-0")                                     \
  V(name_string, "name")                                         \
  V(nan_string, "NaN")                                           \
  V(next_string, "next")                                         \
  V(null_string, "null")                                         \
  V(null_to_string, "[object Null]")                             \
  V(number_string, "number")                                     \
  V(Number_string, "Number")                                     \
  V(object_string, "object")                                     \
  V(Object_string, "Object")                                     \
  V(ownKeys_string, "ownKeys")                                   \
  V(preventExtensions_string, "preventExtensions")               \
  V(private_api_string, "private_api")                           \
  V(Promise_string, "Promise")                                   \
  V(proto_string, "__proto__")                                   \
  V(prototype_string, "prototype")                               \
  V(Proxy_string, "Proxy")                                       \
  V(query_colon_string, "(?:)")                                  \
  V(RegExp_string, "RegExp")                                     \
  V(setPrototypeOf_string, "setPrototypeOf")                     \
  V(set_string, "set")                                           \
  V(Set_string, "Set")                                           \
  V(source_mapping_url_string, "source_mapping_url")             \
  V(source_string, "source")                                     \
  V(source_url_string, "source_url")                             \
  V(stack_string, "stack")                                       \
  V(strict_compare_ic_string, "===")                             \
  V(string_string, "string")                                     \
  V(String_string, "String")                                     \
  V(symbol_string, "symbol")                                     \
  V(Symbol_string, "Symbol")                                     \
  V(this_string, "this")                                         \
  V(throw_string, "throw")                                       \
  V(toJSON_string, "toJSON")                                     \
  V(toString_string, "toString")                                 \
  V(true_string, "true")                                         \
  V(uint16x8_string, "uint16x8")                                 \
  V(Uint16x8_string, "Uint16x8")                                 \
  V(uint32x4_string, "uint32x4")                                 \
  V(Uint32x4_string, "Uint32x4")                                 \
  V(uint8x16_string, "uint8x16")                                 \
  V(Uint8x16_string, "Uint8x16")                                 \
  V(undefined_string, "undefined")                               \
  V(undefined_to_string, "[object Undefined]")                   \
  V(valueOf_string, "valueOf")                                   \
  V(value_string, "value")                                       \
  V(WeakMap_string, "WeakMap")                                   \
  V(WeakSet_string, "WeakSet")                                   \
  V(writable_string, "writable")

#define PRIVATE_SYMBOL_LIST(V)              \
  V(array_iteration_kind_symbol)            \
  V(array_iterator_next_symbol)             \
  V(array_iterator_object_symbol)           \
  V(call_site_function_symbol)              \
  V(call_site_position_symbol)              \
  V(call_site_receiver_symbol)              \
  V(call_site_strict_symbol)                \
  V(class_end_position_symbol)              \
  V(class_start_position_symbol)            \
  V(detailed_stack_trace_symbol)            \
  V(elements_transition_symbol)             \
  V(error_end_pos_symbol)                   \
  V(error_script_symbol)                    \
  V(error_start_pos_symbol)                 \
  V(formatted_stack_trace_symbol)           \
  V(frozen_symbol)                          \
  V(hash_code_symbol)                       \
  V(home_object_symbol)                     \
  V(internal_error_symbol)                  \
  V(intl_impl_object_symbol)                \
  V(intl_initialized_marker_symbol)         \
  V(intl_pattern_symbol)                    \
  V(intl_resolved_symbol)                   \
  V(megamorphic_symbol)                     \
  V(native_context_index_symbol)            \
  V(nonexistent_symbol)                     \
  V(nonextensible_symbol)                   \
  V(normal_ic_symbol)                       \
  V(not_mapped_symbol)                      \
  V(observed_symbol)                        \
  V(premonomorphic_symbol)                  \
  V(promise_combined_deferred_symbol)       \
  V(promise_debug_marker_symbol)            \
  V(promise_has_handler_symbol)             \
  V(promise_on_resolve_symbol)              \
  V(promise_on_reject_symbol)               \
  V(promise_raw_symbol)                     \
  V(promise_status_symbol)                  \
  V(promise_value_symbol)                   \
  V(sealed_symbol)                          \
  V(stack_trace_symbol)                     \
  V(strict_function_transition_symbol)      \
  V(string_iterator_iterated_string_symbol) \
  V(string_iterator_next_index_symbol)      \
  V(strong_function_transition_symbol)      \
  V(uninitialized_symbol)

#define PUBLIC_SYMBOL_LIST(V)                \
  V(has_instance_symbol, Symbol.hasInstance) \
  V(iterator_symbol, Symbol.iterator)        \
  V(match_symbol, Symbol.match)              \
  V(replace_symbol, Symbol.replace)          \
  V(search_symbol, Symbol.search)            \
  V(species_symbol, Symbol.species)          \
  V(split_symbol, Symbol.split)              \
  V(to_primitive_symbol, Symbol.toPrimitive) \
  V(unscopables_symbol, Symbol.unscopables)

// Well-Known Symbols are "Public" symbols, which have a bit set that causes
// them to produce an undefined value when a load results in a failed access
// check. Because this behaviour is not yet properly specified, it only
// applies to a subset of spec-defined Well-Known Symbols.
#define WELL_KNOWN_SYMBOL_LIST(V)                           \
  V(is_concat_spreadable_symbol, Symbol.isConcatSpreadable) \
  V(to_string_tag_symbol, Symbol.toStringTag)

// Heap roots that are known to be immortal and immovable, for which we can
// safely skip write barriers. This list is not complete.
#define IMMORTAL_IMMOVABLE_ROOT_LIST(V) \
  V(ByteArrayMap)                       \
  V(BytecodeArrayMap)                   \
  V(FreeSpaceMap)                       \
  V(OnePointerFillerMap)                \
  V(TwoPointerFillerMap)                \
  V(UndefinedValue)                     \
  V(TheHoleValue)                       \
  V(NullValue)                          \
  V(TrueValue)                          \
  V(FalseValue)                         \
  V(UninitializedValue)                 \
  V(CellMap)                            \
  V(GlobalPropertyCellMap)              \
  V(SharedFunctionInfoMap)              \
  V(MetaMap)                            \
  V(HeapNumberMap)                      \
  V(MutableHeapNumberMap)               \
  V(Float32x4Map)                       \
  V(Int32x4Map)                         \
  V(Uint32x4Map)                        \
  V(Bool32x4Map)                        \
  V(Int16x8Map)                         \
  V(Uint16x8Map)                        \
  V(Bool16x8Map)                        \
  V(Int8x16Map)                         \
  V(Uint8x16Map)                        \
  V(Bool8x16Map)                        \
  V(NativeContextMap)                   \
  V(FixedArrayMap)                      \
  V(CodeMap)                            \
  V(ScopeInfoMap)                       \
  V(FixedCOWArrayMap)                   \
  V(FixedDoubleArrayMap)                \
  V(WeakCellMap)                        \
  V(TransitionArrayMap)                 \
  V(NoInterceptorResultSentinel)        \
  V(HashTableMap)                       \
  V(OrderedHashTableMap)                \
  V(EmptyFixedArray)                    \
  V(EmptyByteArray)                     \
  V(EmptyBytecodeArray)                 \
  V(EmptyDescriptorArray)               \
  V(ArgumentsMarker)                    \
  V(SymbolMap)                          \
  V(SloppyArgumentsElementsMap)         \
  V(FunctionContextMap)                 \
  V(CatchContextMap)                    \
  V(WithContextMap)                     \
  V(BlockContextMap)                    \
  V(ModuleContextMap)                   \
  V(ScriptContextMap)                   \
  V(UndefinedMap)                       \
  V(TheHoleMap)                         \
  V(NullMap)                            \
  V(BooleanMap)                         \
  V(UninitializedMap)                   \
  V(ArgumentsMarkerMap)                 \
  V(JSMessageObjectMap)                 \
  V(ForeignMap)                         \
  V(NeanderMap)                         \
  V(EmptyWeakCell)                      \
  V(empty_string)                       \
  PRIVATE_SYMBOL_LIST(V)

// Forward declarations.
class ArrayBufferTracker;
class GCIdleTimeAction;
class GCIdleTimeHandler;
class GCIdleTimeHeapState;
class GCTracer;
class HeapObjectsFilter;
class HeapStats;
class HistogramTimer;
class Isolate;
class MemoryReducer;
class ObjectStats;
class Scavenger;
class ScavengeJob;
class WeakObjectRetainer;


// A queue of objects promoted during scavenge. Each object is accompanied
// by its size to avoid dereferencing a map pointer for scanning.
// The last page in to-space is used for the promotion queue. On conflict
// during scavenge, the promotion queue is allocated externally and all
// entries are copied to the external queue.
class PromotionQueue {
 public:
  explicit PromotionQueue(Heap* heap)
      : front_(NULL),
        rear_(NULL),
        limit_(NULL),
        emergency_stack_(0),
        heap_(heap) {}

  void Initialize();

  void Destroy() {
    DCHECK(is_empty());
    delete emergency_stack_;
    emergency_stack_ = NULL;
  }

  Page* GetHeadPage() {
    return Page::FromAllocationTop(reinterpret_cast<Address>(rear_));
  }

  void SetNewLimit(Address limit) {
    // If we are already using an emergency stack, we can ignore it.
    if (emergency_stack_) return;

    // If the limit is not on the same page, we can ignore it.
    if (Page::FromAllocationTop(limit) != GetHeadPage()) return;

    limit_ = reinterpret_cast<intptr_t*>(limit);

    if (limit_ <= rear_) {
      return;
    }

    RelocateQueueHead();
  }

  bool IsBelowPromotionQueue(Address to_space_top) {
    // If an emergency stack is used, the to-space address cannot interfere
    // with the promotion queue.
    if (emergency_stack_) return true;

    // If the given to-space top pointer and the head of the promotion queue
    // are not on the same page, then the to-space objects are below the
    // promotion queue.
    if (GetHeadPage() != Page::FromAddress(to_space_top)) {
      return true;
    }
    // If the to-space top pointer is smaller than or equal to the promotion
    // queue head, then the to-space objects are below the promotion queue.
    return reinterpret_cast<intptr_t*>(to_space_top) <= rear_;
  }

  bool is_empty() {
    return (front_ == rear_) &&
           (emergency_stack_ == NULL || emergency_stack_->length() == 0);
  }

  inline void insert(HeapObject* target, int size);

  void remove(HeapObject** target, int* size) {
    DCHECK(!is_empty());
    if (front_ == rear_) {
      Entry e = emergency_stack_->RemoveLast();
      *target = e.obj_;
      *size = e.size_;
      return;
    }

    *target = reinterpret_cast<HeapObject*>(*(--front_));
    *size = static_cast<int>(*(--front_));
    // Assert no underflow.
    SemiSpace::AssertValidRange(reinterpret_cast<Address>(rear_),
                                reinterpret_cast<Address>(front_));
  }

 private:
  // The front of the queue is higher in the memory page chain than the rear.
  intptr_t* front_;
  intptr_t* rear_;
  intptr_t* limit_;

  static const int kEntrySizeInWords = 2;

  struct Entry {
    Entry(HeapObject* obj, int size) : obj_(obj), size_(size) {}

    HeapObject* obj_;
    int size_;
  };
  List<Entry>* emergency_stack_;

  Heap* heap_;

  void RelocateQueueHead();

  DISALLOW_COPY_AND_ASSIGN(PromotionQueue);
};
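
// Illustrative usage sketch (comment added, not part of the original header).
// During a scavenge, each promoted object is pushed together with its size,
// and the scavenger later drains the queue; the exact call sites live in the
// scavenger implementation, so this only shows the intended protocol:
//
//   PromotionQueue* queue = heap->promotion_queue();
//   queue->insert(object, object_size);  // while evacuating an object
//   ...
//   while (!queue->is_empty()) {
//     HeapObject* target;
//     int size;
//     queue->remove(&target, &size);  // yields one inserted (object, size)
//   }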


enum ArrayStorageAllocationMode {
  DONT_INITIALIZE_ARRAY_ELEMENTS,
  INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE
};


class Heap {
 public:
  // Declare all the root indices.  This defines the root list order.
  enum RootListIndex {
#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
    STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
#undef ROOT_INDEX_DECLARATION

#define STRING_INDEX_DECLARATION(name, str) k##name##RootIndex,
    INTERNALIZED_STRING_LIST(STRING_INDEX_DECLARATION)
#undef STRING_INDEX_DECLARATION

#define SYMBOL_INDEX_DECLARATION(name) k##name##RootIndex,
    PRIVATE_SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
#undef SYMBOL_INDEX_DECLARATION

#define SYMBOL_INDEX_DECLARATION(name, description) k##name##RootIndex,
    PUBLIC_SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
    WELL_KNOWN_SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
#undef SYMBOL_INDEX_DECLARATION

// Utility type maps.
#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
    STRUCT_LIST(DECLARE_STRUCT_MAP)
#undef DECLARE_STRUCT_MAP
    kStringTableRootIndex,

#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
    SMI_ROOT_LIST(ROOT_INDEX_DECLARATION)
#undef ROOT_INDEX_DECLARATION
    kRootListLength,
    kStrongRootListLength = kStringTableRootIndex,
    kSmiRootsStart = kStringTableRootIndex + 1
  };
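
  // For illustration (comment added, not in the original header): applied to
  // the STRONG_ROOT_LIST entry V(Oddball, undefined_value, UndefinedValue),
  // ROOT_INDEX_DECLARATION above yields the enumerator
  // kUndefinedValueRootIndex, the same constant that the STATIC_ASSERTs
  // further below check against Internals::kUndefinedValueRootIndex.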

  // Indicates whether live bytes adjustment is triggered
  // - from within GC code before sweeping has started (SEQUENTIAL_TO_SWEEPER),
  // - or from within GC code or mutator code while sweeping is in progress
  //   (CONCURRENT_TO_SWEEPER).
  enum InvocationMode { SEQUENTIAL_TO_SWEEPER, CONCURRENT_TO_SWEEPER };

  enum PretenuringFeedbackInsertionMode { kCached, kGlobal };

  enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };

  // Taking this lock prevents the GC from entering a phase that relocates
  // object references.
  class RelocationLock {
   public:
    explicit RelocationLock(Heap* heap) : heap_(heap) {
      heap_->relocation_mutex_.Lock();
    }

    ~RelocationLock() { heap_->relocation_mutex_.Unlock(); }

   private:
    Heap* heap_;
  };

  // Support for partial snapshots.  After calling this we have a linear
  // space to write objects in each space.
  struct Chunk {
    uint32_t size;
    Address start;
    Address end;
  };
  typedef List<Chunk> Reservation;

  static const intptr_t kMinimumOldGenerationAllocationLimit =
      8 * (Page::kPageSize > MB ? Page::kPageSize : MB);

  static const int kInitalOldGenerationLimitFactor = 2;

#if V8_OS_ANDROID
  // Don't apply the pointer multiplier on Android, since it has no swap space
  // and should instead adapt its heap size based on available physical memory.
  static const int kPointerMultiplier = 1;
#else
  static const int kPointerMultiplier = i::kPointerSize / 4;
#endif

  // The new space size has to be a power of 2. Sizes are in MB.
  static const int kMaxSemiSpaceSizeLowMemoryDevice = 1 * kPointerMultiplier;
  static const int kMaxSemiSpaceSizeMediumMemoryDevice = 4 * kPointerMultiplier;
  static const int kMaxSemiSpaceSizeHighMemoryDevice = 8 * kPointerMultiplier;
  static const int kMaxSemiSpaceSizeHugeMemoryDevice = 8 * kPointerMultiplier;
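  // For example (comment added for illustration): on a 64-bit, non-Android
  // target kPointerMultiplier is kPointerSize / 4 = 8 / 4 = 2, so
  // kMaxSemiSpaceSizeHighMemoryDevice works out to 8 * 2 = 16 MB, while on
  // 32-bit targets and on Android it stays at 8 MB.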

  // The old space size has to be a multiple of Page::kPageSize.
  // Sizes are in MB.
  static const int kMaxOldSpaceSizeLowMemoryDevice = 128 * kPointerMultiplier;
  static const int kMaxOldSpaceSizeMediumMemoryDevice =
      256 * kPointerMultiplier;
  static const int kMaxOldSpaceSizeHighMemoryDevice = 512 * kPointerMultiplier;
  static const int kMaxOldSpaceSizeHugeMemoryDevice = 700 * kPointerMultiplier;

  // The executable size has to be a multiple of Page::kPageSize.
  // Sizes are in MB.
  static const int kMaxExecutableSizeLowMemoryDevice = 96 * kPointerMultiplier;
  static const int kMaxExecutableSizeMediumMemoryDevice =
      192 * kPointerMultiplier;
  static const int kMaxExecutableSizeHighMemoryDevice =
      256 * kPointerMultiplier;
  static const int kMaxExecutableSizeHugeMemoryDevice =
      256 * kPointerMultiplier;

  static const int kTraceRingBufferSize = 512;
  static const int kStacktraceBufferSize = 512;

  static const double kMinHeapGrowingFactor;
  static const double kMaxHeapGrowingFactor;
  static const double kMaxHeapGrowingFactorMemoryConstrained;
  static const double kMaxHeapGrowingFactorIdle;
  static const double kTargetMutatorUtilization;

  // Sloppy mode arguments object size.
  static const int kSloppyArgumentsObjectSize =
      JSObject::kHeaderSize + 2 * kPointerSize;

  // A strict mode arguments object has no callee, so it is smaller.
  static const int kStrictArgumentsObjectSize =
      JSObject::kHeaderSize + 1 * kPointerSize;

  // Indices for direct access into argument objects.
  static const int kArgumentsLengthIndex = 0;

  // callee is only valid in sloppy mode.
  static const int kArgumentsCalleeIndex = 1;

  static const int kNoGCFlags = 0;
  static const int kReduceMemoryFootprintMask = 1;
  static const int kAbortIncrementalMarkingMask = 2;
  static const int kFinalizeIncrementalMarkingMask = 4;

  // Making the heap iterable requires us to abort incremental marking.
  static const int kMakeHeapIterableMask = kAbortIncrementalMarkingMask;

  // The roots that have an index less than this are always in old space.
  static const int kOldSpaceRoots = 0x20;

  // The minimum size of a HeapObject on the heap.
  static const int kMinObjectSizeInWords = 2;

  STATIC_ASSERT(kUndefinedValueRootIndex ==
                Internals::kUndefinedValueRootIndex);
  STATIC_ASSERT(kNullValueRootIndex == Internals::kNullValueRootIndex);
  STATIC_ASSERT(kTrueValueRootIndex == Internals::kTrueValueRootIndex);
  STATIC_ASSERT(kFalseValueRootIndex == Internals::kFalseValueRootIndex);
  STATIC_ASSERT(kempty_stringRootIndex == Internals::kEmptyStringRootIndex);

  // Calculates the maximum amount of filler that could be required by the
  // given alignment.
  static int GetMaximumFillToAlign(AllocationAlignment alignment);
  // Calculates the actual amount of filler required for a given address at the
  // given alignment.
  static int GetFillToAlign(Address address, AllocationAlignment alignment);

  template <typename T>
  static inline bool IsOneByte(T t, int chars);

  static void FatalProcessOutOfMemory(const char* location,
                                      bool take_snapshot = false);

  static bool RootIsImmortalImmovable(int root_index);

  // Checks whether the space is valid.
  static bool IsValidAllocationSpace(AllocationSpace space);

  // Generated code can embed direct references to non-writable roots if
  // they are in new space.
  static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index);

  // Zapping is needed for verify heap, and always done in debug builds.
  static inline bool ShouldZapGarbage() {
#ifdef DEBUG
    return true;
#else
#ifdef VERIFY_HEAP
    return FLAG_verify_heap;
#else
    return false;
#endif
#endif
  }

  static double HeapGrowingFactor(double gc_speed, double mutator_speed);

  // Copy block of memory from src to dst. Size of block should be aligned
  // by pointer size.
  static inline void CopyBlock(Address dst, Address src, int byte_size);

  // Optimized version of memmove for blocks with pointer size aligned sizes
  // and pointer size aligned addresses.
  static inline void MoveBlock(Address dst, Address src, int byte_size);

  // Determines a static visitor id based on the given {map} that can then be
  // stored on the map to facilitate fast dispatch for {StaticVisitorBase}.
  static int GetStaticVisitorIdForMap(Map* map);

  // Notifies the heap that it is ok to start marking or other activities that
  // should not happen during deserialization.
  void NotifyDeserializationComplete();

  intptr_t old_generation_allocation_limit() const {
    return old_generation_allocation_limit_;
  }

  bool always_allocate() { return always_allocate_scope_count_.Value() != 0; }

  Address* NewSpaceAllocationTopAddress() {
    return new_space_.allocation_top_address();
  }
  Address* NewSpaceAllocationLimitAddress() {
    return new_space_.allocation_limit_address();
  }

  Address* OldSpaceAllocationTopAddress() {
    return old_space_->allocation_top_address();
  }
  Address* OldSpaceAllocationLimitAddress() {
    return old_space_->allocation_limit_address();
  }

  // TODO(hpayer): There is still a mismatch between capacity and actual
  // committed memory size.
  bool CanExpandOldGeneration(int size = 0) {
    if (force_oom_) return false;
    return (CommittedOldGenerationMemory() + size) < MaxOldGenerationSize();
  }

  // Clear the Instanceof cache (used when a prototype changes).
  inline void ClearInstanceofCache();

  // FreeSpace objects have a null map after deserialization. Update the map.
  void RepairFreeListsAfterDeserialization();

  // Move len elements within a given array from index src_index to index
  // dst_index.
  void MoveElements(FixedArray* array, int dst_index, int src_index, int len);

  // Initialize a filler object to keep the ability to iterate over the heap
  // when introducing gaps within pages.
  void CreateFillerObjectAt(Address addr, int size);

  bool CanMoveObjectStart(HeapObject* object);

  // Maintain consistency of live bytes during incremental marking.
  void AdjustLiveBytes(HeapObject* object, int by, InvocationMode mode);

  // Trim the given array from the left. Note that this relocates the object
  // start and hence is only valid if there is only a single reference to it.
  FixedArrayBase* LeftTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);

  // Trim the given array from the right.
  template<Heap::InvocationMode mode>
  void RightTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);
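
  // Illustrative call (comment added, not in the original header): the
  // invocation mode is supplied as a template argument, e.g.
  //
  //   heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(array, 2);
  //
  // drops the last two elements of |array| in place.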

  // Converts the given boolean condition to JavaScript boolean value.
  inline Object* ToBoolean(bool condition);

  // Check whether the heap is currently iterable.
  bool IsHeapIterable();

  // Notify the heap that a context has been disposed.
  int NotifyContextDisposed(bool dependant_context);

  inline void increment_scan_on_scavenge_pages() {
    scan_on_scavenge_pages_++;
    if (FLAG_gc_verbose) {
      PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
    }
  }

  inline void decrement_scan_on_scavenge_pages() {
    scan_on_scavenge_pages_--;
    if (FLAG_gc_verbose) {
      PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
    }
  }

  void set_native_contexts_list(Object* object) {
    native_contexts_list_ = object;
  }
  Object* native_contexts_list() const { return native_contexts_list_; }

  void set_allocation_sites_list(Object* object) {
    allocation_sites_list_ = object;
  }
  Object* allocation_sites_list() { return allocation_sites_list_; }

  // Used in CreateAllocationSiteStub and the (de)serializer.
  Object** allocation_sites_list_address() { return &allocation_sites_list_; }

  void set_encountered_weak_collections(Object* weak_collection) {
    encountered_weak_collections_ = weak_collection;
  }
  Object* encountered_weak_collections() const {
    return encountered_weak_collections_;
  }

  void set_encountered_weak_cells(Object* weak_cell) {
    encountered_weak_cells_ = weak_cell;
  }
  Object* encountered_weak_cells() const { return encountered_weak_cells_; }

  void set_encountered_transition_arrays(Object* transition_array) {
    encountered_transition_arrays_ = transition_array;
  }
  Object* encountered_transition_arrays() const {
    return encountered_transition_arrays_;
  }

  // Number of mark-sweeps.
  int ms_count() const { return ms_count_; }

  // Checks whether the given object is allowed to be migrated from its
  // current space into the given destination space. Used for debugging.
  inline bool AllowedToBeMigrated(HeapObject* object, AllocationSpace dest);

  void CheckHandleCount();

  // Number of "runtime allocations" done so far.
  uint32_t allocations_count() { return allocations_count_; }

  // Print short heap statistics.
  void PrintShortHeapStatistics();

  inline HeapState gc_state() { return gc_state_; }

  inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; }

  // If an object has an AllocationMemento trailing it, return it; otherwise
  // return NULL.
  inline AllocationMemento* FindAllocationMemento(HeapObject* object);

  // Returns false if not able to reserve.
  bool ReserveSpace(Reservation* reservations);

  //
  // Support for the API.
  //

  void CreateApiObjects();

  // Implements the corresponding V8 API function.
  bool IdleNotification(double deadline_in_seconds);
  bool IdleNotification(int idle_time_in_ms);

  double MonotonicallyIncreasingTimeInMs();

  void RecordStats(HeapStats* stats, bool take_snapshot = false);

  // Check the new space expansion criteria and expand semispaces if they
  // were hit.
  void CheckNewSpaceExpansionCriteria();
951  
HeapIsFullEnoughToStartIncrementalMarking(intptr_t limit)952    inline bool HeapIsFullEnoughToStartIncrementalMarking(intptr_t limit) {
953      if (FLAG_stress_compaction && (gc_count_ & 1) != 0) return true;
954  
955      intptr_t adjusted_allocation_limit = limit - new_space_.Capacity();
956  
957      if (PromotedTotalSize() >= adjusted_allocation_limit) return true;
958  
959      return false;
960    }
961  
962    void VisitExternalResources(v8::ExternalResourceVisitor* visitor);
963  
964    // An object should be promoted if the object has survived a
965    // scavenge operation.
966    inline bool ShouldBePromoted(Address old_address, int object_size);
967  
968    void ClearNormalizedMapCaches();
969  
970    void IncrementDeferredCount(v8::Isolate::UseCounterFeature feature);
971  
972    inline bool OldGenerationAllocationLimitReached();
973  
974    void QueueMemoryChunkForFree(MemoryChunk* chunk);
975    void FilterStoreBufferEntriesOnAboutToBeFreedPages();
976    void FreeQueuedChunks(MemoryChunk* list_head);
977    void FreeQueuedChunks();
978    void WaitUntilUnmappingOfFreeChunksCompleted();
979  
980    // Completely clear the Instanceof cache (to stop it keeping objects alive
981    // around a GC).
982    inline void CompletelyClearInstanceofCache();
983  
984    inline uint32_t HashSeed();
985  
986    inline int NextScriptId();
987  
988    inline void SetArgumentsAdaptorDeoptPCOffset(int pc_offset);
989    inline void SetConstructStubDeoptPCOffset(int pc_offset);
990    inline void SetGetterStubDeoptPCOffset(int pc_offset);
991    inline void SetSetterStubDeoptPCOffset(int pc_offset);
992  
993    // For post mortem debugging.
994    void RememberUnmappedPage(Address page, bool compacted);
995  
996    // Global inline caching age: it is incremented on some GCs after context
997    // disposal. We use it to flush inline caches.
global_ic_age()998    int global_ic_age() { return global_ic_age_; }
999  
AgeInlineCaches()1000    void AgeInlineCaches() {
1001      global_ic_age_ = (global_ic_age_ + 1) & SharedFunctionInfo::ICAgeBits::kMax;
1002    }
1003  
amount_of_external_allocated_memory()1004    int64_t amount_of_external_allocated_memory() {
1005      return amount_of_external_allocated_memory_;
1006    }
1007  
update_amount_of_external_allocated_memory(int64_t delta)1008    void update_amount_of_external_allocated_memory(int64_t delta) {
1009      amount_of_external_allocated_memory_ += delta;
1010    }
1011  
1012    void DeoptMarkedAllocationSites();
1013  
DeoptMaybeTenuredAllocationSites()1014    bool DeoptMaybeTenuredAllocationSites() {
1015      return new_space_.IsAtMaximumCapacity() && maximum_size_scavenges_ == 0;
1016    }
1017  
1018    void AddWeakObjectToCodeDependency(Handle<HeapObject> obj,
1019                                       Handle<DependentCode> dep);
1020  
1021    DependentCode* LookupWeakObjectToCodeDependency(Handle<HeapObject> obj);
1022  
1023    void AddRetainedMap(Handle<Map> map);
1024  
1025    // This event is triggered after successful allocation of a new object made
1026    // by runtime. Allocations of target space for object evacuation do not
1027    // trigger the event. In order to track ALL allocations one must turn off
1028    // FLAG_inline_new and FLAG_use_allocation_folding.
1029    inline void OnAllocationEvent(HeapObject* object, int size_in_bytes);
1030  
1031    // This event is triggered after object is moved to a new place.
1032    inline void OnMoveEvent(HeapObject* target, HeapObject* source,
1033                            int size_in_bytes);
1034  
deserialization_complete()1035    bool deserialization_complete() const { return deserialization_complete_; }
1036  
1037    bool HasLowAllocationRate();
1038    bool HasHighFragmentation();
1039    bool HasHighFragmentation(intptr_t used, intptr_t committed);
1040  
SetOptimizeForLatency()1041    void SetOptimizeForLatency() { optimize_for_memory_usage_ = false; }
SetOptimizeForMemoryUsage()1042    void SetOptimizeForMemoryUsage() { optimize_for_memory_usage_ = true; }
ShouldOptimizeForMemoryUsage()1043    bool ShouldOptimizeForMemoryUsage() { return optimize_for_memory_usage_; }
1044  
1045    // ===========================================================================
1046    // Initialization. ===========================================================
1047    // ===========================================================================
1048  
1049    // Configure heap size in MB before setup. Return false if the heap has been
1050    // set up already.
1051    bool ConfigureHeap(int max_semi_space_size, int max_old_space_size,
1052                       int max_executable_size, size_t code_range_size);
1053    bool ConfigureHeapDefault();
1054  
1055    // Prepares the heap, setting up memory areas that are needed in the isolate
1056    // without actually creating any objects.
1057    bool SetUp();
1058  
1059    // Bootstraps the object heap with the core set of objects required to run.
1060    // Returns whether it succeeded.
1061    bool CreateHeapObjects();
1062  
1063    // Destroys all memory allocated by the heap.
1064    void TearDown();
1065  
1066    // Returns whether SetUp has been called.
1067    bool HasBeenSetUp();
1068  
1069    // ===========================================================================
1070    // Getters for spaces. =======================================================
1071    // ===========================================================================
1072  
1073    // Return the starting address and a mask for the new space.  And-masking an
1074    // address with the mask will result in the start address of the new space
1075    // for all addresses in either semispace.
  Address NewSpaceStart() { return new_space_.start(); }
  uintptr_t NewSpaceMask() { return new_space_.mask(); }
  Address NewSpaceTop() { return new_space_.top(); }
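
  // Illustrative sketch (not part of the API): given the masking property
  // described above, a new-space containment check could be written as
  // follows; InNewSpaceFast and addr are hypothetical names.
  //
  //   bool InNewSpaceFast(Heap* heap, Address addr) {
  //     return (reinterpret_cast<uintptr_t>(addr) & heap->NewSpaceMask()) ==
  //            reinterpret_cast<uintptr_t>(heap->NewSpaceStart());
  //   }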

  NewSpace* new_space() { return &new_space_; }
  OldSpace* old_space() { return old_space_; }
  OldSpace* code_space() { return code_space_; }
  MapSpace* map_space() { return map_space_; }
  LargeObjectSpace* lo_space() { return lo_space_; }

  PagedSpace* paged_space(int idx) {
    switch (idx) {
      case OLD_SPACE:
        return old_space();
      case MAP_SPACE:
        return map_space();
      case CODE_SPACE:
        return code_space();
      case NEW_SPACE:
      case LO_SPACE:
        UNREACHABLE();
    }
    return NULL;
  }

  Space* space(int idx) {
    switch (idx) {
      case NEW_SPACE:
        return new_space();
      case LO_SPACE:
        return lo_space();
      default:
        return paged_space(idx);
    }
  }

  // Returns the name of the space.
  const char* GetSpaceName(int idx);
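
  // Sketch of iterating all spaces via the index-based getters above,
  // assuming the AllocationSpace enum provides FIRST_SPACE and LAST_SPACE
  // bounds (an assumption for illustration):
  //
  //   for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) {
  //     Space* s = heap->space(i);  // covers NEW_SPACE and LO_SPACE too
  //     ... use heap->GetSpaceName(i) and s ...
  //   }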

  // ===========================================================================
  // Getters to other components. ==============================================
  // ===========================================================================

  GCTracer* tracer() { return tracer_; }

  PromotionQueue* promotion_queue() { return &promotion_queue_; }

  inline Isolate* isolate();

  MarkCompactCollector* mark_compact_collector() {
    return mark_compact_collector_;
  }

  // ===========================================================================
  // Root set access. ==========================================================
  // ===========================================================================

  // Heap root getters.
#define ROOT_ACCESSOR(type, name, camel_name) inline type* name();
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR
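
  // For example, a hypothetical root list entry V(Map, foo_map, FooMap)
  // (illustrative only, not a real root) would expand through ROOT_ACCESSOR
  // above to:
  //
  //   inline Map* foo_map();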

  // Utility type maps.
#define STRUCT_MAP_ACCESSOR(NAME, Name, name) inline Map* name##_map();
  STRUCT_LIST(STRUCT_MAP_ACCESSOR)
#undef STRUCT_MAP_ACCESSOR

#define STRING_ACCESSOR(name, str) inline String* name();
  INTERNALIZED_STRING_LIST(STRING_ACCESSOR)
#undef STRING_ACCESSOR

#define SYMBOL_ACCESSOR(name) inline Symbol* name();
  PRIVATE_SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR

#define SYMBOL_ACCESSOR(name, description) inline Symbol* name();
  PUBLIC_SYMBOL_LIST(SYMBOL_ACCESSOR)
  WELL_KNOWN_SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR

  Object* root(RootListIndex index) { return roots_[index]; }
  Handle<Object> root_handle(RootListIndex index) {
    return Handle<Object>(&roots_[index]);
  }

  // Generated code can embed this address to get access to the roots.
  Object** roots_array_start() { return roots_; }

  // Sets the stub_cache_ (only used when expanding the dictionary).
  void SetRootCodeStubs(UnseededNumberDictionary* value) {
    roots_[kCodeStubsRootIndex] = value;
  }

  // Sets the non_monomorphic_cache_ (only used when expanding the dictionary).
  void SetRootNonMonomorphicCache(UnseededNumberDictionary* value) {
    roots_[kNonMonomorphicCacheRootIndex] = value;
  }

  void SetRootMaterializedObjects(FixedArray* objects) {
    roots_[kMaterializedObjectsRootIndex] = objects;
  }

  void SetRootScriptList(Object* value) {
    roots_[kScriptListRootIndex] = value;
  }

  void SetRootStringTable(StringTable* value) {
    roots_[kStringTableRootIndex] = value;
  }

  void SetRootNoScriptSharedFunctionInfos(Object* value) {
    roots_[kNoScriptSharedFunctionInfosRootIndex] = value;
  }

  // Set the stack limit in the roots_ array.  Some architectures generate
  // code that looks here, because it is faster than loading from the static
  // jslimit_/real_jslimit_ variable in the StackGuard.
  void SetStackLimits();

  // Generated code can treat direct references to this root as constant.
  bool RootCanBeTreatedAsConstant(RootListIndex root_index);

  Map* MapForFixedTypedArray(ExternalArrayType array_type);
  RootListIndex RootIndexForFixedTypedArray(ExternalArrayType array_type);

  RootListIndex RootIndexForEmptyFixedTypedArray(ElementsKind kind);
  FixedTypedArrayBase* EmptyFixedTypedArrayForMap(Map* map);

  void RegisterStrongRoots(Object** start, Object** end);
  void UnregisterStrongRoots(Object** start);

  // ===========================================================================
  // Inline allocation. ========================================================
  // ===========================================================================

  // Indicates whether inline bump-pointer allocation has been disabled.
  bool inline_allocation_disabled() { return inline_allocation_disabled_; }

  // Switch whether inline bump-pointer allocation should be used.
  void EnableInlineAllocation();
  void DisableInlineAllocation();

  // ===========================================================================
  // Methods triggering GCs. ===================================================
  // ===========================================================================

  // Performs a garbage collection operation.
  // Returns whether there is a chance that another major GC could
  // collect more garbage.
  inline bool CollectGarbage(
      AllocationSpace space, const char* gc_reason = NULL,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

  // Performs a full garbage collection.  If (flags & kMakeHeapIterableMask) is
  // non-zero, then the slower precise sweeper is used, which leaves the heap
  // in a state where we can iterate over the heap visiting all objects.
  void CollectAllGarbage(
      int flags = kFinalizeIncrementalMarkingMask, const char* gc_reason = NULL,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

  // Last hope GC, should try to squeeze as much as possible.
  void CollectAllAvailableGarbage(const char* gc_reason = NULL);
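
  // Usage sketch (illustrative, e.g. from test code), assuming a configured
  // Heap* heap; kMakeHeapIterableMask is the flag named in the comment above:
  //
  //   heap->CollectAllGarbage(Heap::kMakeHeapIterableMask,
  //                           "example: make heap iterable");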

  // Reports an external memory pressure event; either performs a major GC or
  // completes incremental marking in order to free external resources.
  void ReportExternalMemoryPressure(const char* gc_reason = NULL);

  // Invoked when GC was requested via the stack guard.
  void HandleGCRequest();

  // ===========================================================================
  // Iterators. ================================================================
  // ===========================================================================

  // Iterates over all roots in the heap.
  void IterateRoots(ObjectVisitor* v, VisitMode mode);
  // Iterates over all strong roots in the heap.
  void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
  // Iterates over entries in the smi roots list.  Only interesting to the
  // serializer/deserializer, since GC does not care about smis.
  void IterateSmiRoots(ObjectVisitor* v);
  // Iterates over all the other roots in the heap.
  void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);

  // Iterates over pointers to the from-space of new space found in the memory
  // interval from start to end within |object|.
  void IteratePointersToFromSpace(HeapObject* target, int size,
                                  ObjectSlotCallback callback);

  void IterateAndMarkPointersToFromSpace(HeapObject* object, Address start,
                                         Address end, bool record_slots,
                                         ObjectSlotCallback callback);

  // ===========================================================================
  // Store buffer API. =========================================================
  // ===========================================================================

  // Write barrier support for address[offset] = o.
  INLINE(void RecordWrite(Address address, int offset));

  // Write barrier support for address[start : start + len[ = o.
  INLINE(void RecordWrites(Address address, int start, int len));

  Address* store_buffer_top_address() {
    return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]);
  }
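
  // Illustrative sketch of the write-barrier contract above: after storing a
  // tagged value into a field of |object|, the slot is recorded so the GC can
  // find old-to-new pointers. The setter and offset names are hypothetical.
  //
  //   object->set_hypothetical_field(value);
  //   heap->RecordWrite(object->address(), kHypotheticalFieldOffset);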

  // ===========================================================================
  // Incremental marking API. ==================================================
  // ===========================================================================

  // Starts incremental marking and ensures that the idle time handler can
  // perform incremental steps.
  void StartIdleIncrementalMarking();

  // Starts incremental marking assuming incremental marking is currently
  // stopped.
  void StartIncrementalMarking(int gc_flags = kNoGCFlags,
                               const GCCallbackFlags gc_callback_flags =
                                   GCCallbackFlags::kNoGCCallbackFlags,
                               const char* reason = nullptr);

  void FinalizeIncrementalMarkingIfComplete(const char* comment);

  bool TryFinalizeIdleIncrementalMarking(double idle_time_in_ms);

  IncrementalMarking* incremental_marking() { return incremental_marking_; }

  // ===========================================================================
  // External string table API. ================================================
  // ===========================================================================

  // Registers an external string.
  inline void RegisterExternalString(String* string);

  // Finalizes an external string by deleting the associated external
  // data and clearing the resource pointer.
  inline void FinalizeExternalString(String* string);

  // ===========================================================================
  // Methods checking/returning the space of a given object/address. ===========
  // ===========================================================================

  // Returns whether the object resides in new space.
  inline bool InNewSpace(Object* object);
  inline bool InNewSpace(Address address);
  inline bool InNewSpacePage(Address address);
  inline bool InFromSpace(Object* object);
  inline bool InToSpace(Object* object);

  // Returns whether the object resides in old space.
  inline bool InOldSpace(Address address);
  inline bool InOldSpace(Object* object);

  // Checks whether an address/object is in the heap (including the auxiliary
  // area and unused area).
  bool Contains(Address addr);
  bool Contains(HeapObject* value);

  // Checks whether an address/object is in a space.
  // Currently used by tests, serialization and heap verification only.
  bool InSpace(Address addr, AllocationSpace space);
  bool InSpace(HeapObject* value, AllocationSpace space);

  // ===========================================================================
  // Object statistics tracking. ===============================================
  // ===========================================================================

  // Returns the number of buckets used by object statistics tracking during a
  // major GC. Note, however, that the following methods fail gracefully when
  // the bounds are exceeded.
  size_t NumberOfTrackedHeapObjectTypes();

  // Returns object statistics about count and size at the last major GC.
  // Objects are grouped into buckets that roughly resemble existing
  // instance types.
  size_t ObjectCountAtLastGC(size_t index);
  size_t ObjectSizeAtLastGC(size_t index);

  // Retrieves names of buckets used by object statistics tracking.
  bool GetObjectTypeName(size_t index, const char** object_type,
                         const char** object_sub_type);
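
  // Sketch of dumping the per-bucket statistics above (illustrative only):
  //
  //   for (size_t i = 0; i < heap->NumberOfTrackedHeapObjectTypes(); i++) {
  //     const char* type;
  //     const char* sub_type;
  //     if (!heap->GetObjectTypeName(i, &type, &sub_type)) continue;
  //     ... report type/sub_type together with heap->ObjectCountAtLastGC(i)
  //         and heap->ObjectSizeAtLastGC(i) ...
  //   }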

  // ===========================================================================
  // GC statistics. ============================================================
  // ===========================================================================

  // Returns the maximum amount of memory reserved for the heap.  For
  // the young generation, we reserve 4 times the amount needed for a
  // semi space.  The young generation consists of two semi spaces and
  // we reserve twice the amount needed for those in order to ensure
  // that new space can be aligned to its size.
  intptr_t MaxReserved() {
    return 4 * reserved_semispace_size_ + max_old_generation_size_;
  }
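  // Worked example (illustrative numbers only): with reserved_semispace_size_
  // of 8 MB and max_old_generation_size_ of 700 MB, MaxReserved() returns
  // 4 * 8 MB + 700 MB = 732 MB.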
  int MaxSemiSpaceSize() { return max_semi_space_size_; }
  int ReservedSemiSpaceSize() { return reserved_semispace_size_; }
  int InitialSemiSpaceSize() { return initial_semispace_size_; }
  int TargetSemiSpaceSize() { return target_semispace_size_; }
  intptr_t MaxOldGenerationSize() { return max_old_generation_size_; }
  intptr_t MaxExecutableSize() { return max_executable_size_; }

  // Returns the capacity of the heap in bytes w/o growing. Heap grows when
  // more spaces are needed until it reaches the limit.
  intptr_t Capacity();

  // Returns the amount of memory currently committed for the heap.
  intptr_t CommittedMemory();

  // Returns the amount of memory currently committed for the old space.
  intptr_t CommittedOldGenerationMemory();

  // Returns the amount of executable memory currently committed for the heap.
  intptr_t CommittedMemoryExecutable();

  // Returns the amount of physical memory currently committed for the heap.
  size_t CommittedPhysicalMemory();

  // Returns the maximum amount of memory ever committed for the heap.
  intptr_t MaximumCommittedMemory() { return maximum_committed_; }

  // Updates the maximum committed memory for the heap. Should be called
  // whenever a space grows.
  void UpdateMaximumCommitted();

  // Returns the available bytes in space w/o growing.
  // Heap doesn't guarantee that it can allocate an object that requires
  // all available bytes. Check MaxHeapObjectSize() instead.
  intptr_t Available();

  // Returns the size of all objects residing in the heap.
  intptr_t SizeOfObjects();

  void UpdateSurvivalStatistics(int start_new_space_size);

  inline void IncrementPromotedObjectsSize(int object_size) {
    DCHECK_GE(object_size, 0);
    promoted_objects_size_ += object_size;
  }
  inline intptr_t promoted_objects_size() { return promoted_objects_size_; }

  inline void IncrementSemiSpaceCopiedObjectSize(int object_size) {
    DCHECK_GE(object_size, 0);
    semi_space_copied_object_size_ += object_size;
  }
  inline intptr_t semi_space_copied_object_size() {
    return semi_space_copied_object_size_;
  }

  inline intptr_t SurvivedNewSpaceObjectSize() {
    return promoted_objects_size_ + semi_space_copied_object_size_;
  }

  inline void IncrementNodesDiedInNewSpace() { nodes_died_in_new_space_++; }

  inline void IncrementNodesCopiedInNewSpace() { nodes_copied_in_new_space_++; }

  inline void IncrementNodesPromoted() { nodes_promoted_++; }

  inline void IncrementYoungSurvivorsCounter(int survived) {
    DCHECK(survived >= 0);
    survived_last_scavenge_ = survived;
    survived_since_last_expansion_ += survived;
  }

  inline intptr_t PromotedTotalSize() {
    int64_t total = PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize();
    if (total > std::numeric_limits<intptr_t>::max()) {
      // TODO(erikcorry): Use uintptr_t everywhere we do heap size calculations.
      return std::numeric_limits<intptr_t>::max();
    }
    if (total < 0) return 0;
    return static_cast<intptr_t>(total);
  }

  void UpdateNewSpaceAllocationCounter() {
    new_space_allocation_counter_ = NewSpaceAllocationCounter();
  }

  size_t NewSpaceAllocationCounter() {
    return new_space_allocation_counter_ + new_space()->AllocatedSinceLastGC();
  }
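
  // Sketch of measuring new-space allocation across a region of interest
  // using the monotonic counter above (illustrative only):
  //
  //   size_t before = heap->NewSpaceAllocationCounter();
  //   ... run the workload ...
  //   size_t allocated_bytes = heap->NewSpaceAllocationCounter() - before;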

  // This should be used only for testing.
  void set_new_space_allocation_counter(size_t new_value) {
    new_space_allocation_counter_ = new_value;
  }

  void UpdateOldGenerationAllocationCounter() {
    old_generation_allocation_counter_ = OldGenerationAllocationCounter();
  }

  size_t OldGenerationAllocationCounter() {
    return old_generation_allocation_counter_ + PromotedSinceLastGC();
  }

  // This should be used only for testing.
  void set_old_generation_allocation_counter(size_t new_value) {
    old_generation_allocation_counter_ = new_value;
  }

  size_t PromotedSinceLastGC() {
    return PromotedSpaceSizeOfObjects() - old_generation_size_at_last_gc_;
  }

  int gc_count() const { return gc_count_; }

  // Returns the size of objects residing in spaces other than new space.
  intptr_t PromotedSpaceSizeOfObjects();

  double total_regexp_code_generated() { return total_regexp_code_generated_; }
  void IncreaseTotalRegexpCodeGenerated(int size) {
    total_regexp_code_generated_ += size;
  }

  void IncrementCodeGeneratedBytes(bool is_crankshafted, int size) {
    if (is_crankshafted) {
      crankshaft_codegen_bytes_generated_ += size;
    } else {
      full_codegen_bytes_generated_ += size;
    }
  }

  // ===========================================================================
  // Prologue/epilogue callback methods. =======================================
  // ===========================================================================

  void AddGCPrologueCallback(v8::Isolate::GCCallback callback,
                             GCType gc_type_filter, bool pass_isolate = true);
  void RemoveGCPrologueCallback(v8::Isolate::GCCallback callback);

  void AddGCEpilogueCallback(v8::Isolate::GCCallback callback,
                             GCType gc_type_filter, bool pass_isolate = true);
  void RemoveGCEpilogueCallback(v8::Isolate::GCCallback callback);

  void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags);
  void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags);

  // ===========================================================================
  // Allocation methods. =======================================================
  // ===========================================================================

  // Creates a filler object and returns a heap object immediately after it.
  MUST_USE_RESULT HeapObject* PrecedeWithFiller(HeapObject* object,
                                                int filler_size);

  // Creates a filler object if needed for alignment and returns a heap object
  // immediately after it. If any space is left after the returned object,
  // another filler object is created so the over-allocated memory is iterable.
  MUST_USE_RESULT HeapObject* AlignWithFiller(HeapObject* object,
                                              int object_size,
                                              int allocation_size,
                                              AllocationAlignment alignment);

  // ===========================================================================
  // ArrayBuffer tracking. =====================================================
  // ===========================================================================

  void RegisterNewArrayBuffer(JSArrayBuffer* buffer);
  void UnregisterArrayBuffer(JSArrayBuffer* buffer);

  inline ArrayBufferTracker* array_buffer_tracker() {
    return array_buffer_tracker_;
  }

  // ===========================================================================
  // Allocation site tracking. =================================================
  // ===========================================================================

  // Updates the AllocationSite of a given {object}. If the global pretenuring
  // storage is passed as {pretenuring_feedback} the memento found count on
  // the corresponding allocation site is immediately updated and an entry
  // in the hash map is created. Otherwise the entry (including the count
  // value) is cached on the local pretenuring feedback.
  inline void UpdateAllocationSite(HeapObject* object,
                                   HashMap* pretenuring_feedback);

  // Removes an entry from the global pretenuring storage.
  inline void RemoveAllocationSitePretenuringFeedback(AllocationSite* site);

  // Merges local pretenuring feedback into the global one. Note that this
  // method needs to be called after evacuation, as allocation sites may be
  // evacuated and this method resolves forward pointers accordingly.
  void MergeAllocationSitePretenuringFeedback(
      const HashMap& local_pretenuring_feedback);

// =============================================================================

#ifdef VERIFY_HEAP
  // Verify the heap is in its normal state before or after a GC.
  void Verify();
#endif

#ifdef DEBUG
  void set_allocation_timeout(int timeout) { allocation_timeout_ = timeout; }

  void TracePathToObjectFrom(Object* target, Object* root);
  void TracePathToObject(Object* target);
  void TracePathToGlobal();

  void Print();
  void PrintHandles();

  // Report heap statistics.
  void ReportHeapStatistics(const char* title);
  void ReportCodeStatistics(const char* title);
#endif

 private:
  class PretenuringScope;
  class UnmapFreeMemoryTask;

  // The external strings table is a place where all external strings are
  // registered.  We need to keep track of such strings to properly
  // finalize them.
  class ExternalStringTable {
   public:
    // Registers an external string.
    inline void AddString(String* string);

    inline void Iterate(ObjectVisitor* v);

    // Restores internal invariant and gets rid of collected strings.
    // Must be called after each Iterate() that modified the strings.
    void CleanUp();

    // Destroys all allocated memory.
    void TearDown();

   private:
    explicit ExternalStringTable(Heap* heap) : heap_(heap) {}

    inline void Verify();

    inline void AddOldString(String* string);

    // Notifies the table that only a prefix of the new list is valid.
    inline void ShrinkNewStrings(int position);

    // To speed up scavenge collections, new space strings are kept
    // separate from old space strings.
    List<Object*> new_space_strings_;
    List<Object*> old_space_strings_;

    Heap* heap_;

    friend class Heap;

    DISALLOW_COPY_AND_ASSIGN(ExternalStringTable);
  };

  struct StrongRootsList;

  struct StringTypeTable {
    InstanceType type;
    int size;
    RootListIndex index;
  };

  struct ConstantStringTable {
    const char* contents;
    RootListIndex index;
  };

  struct StructTable {
    InstanceType type;
    int size;
    RootListIndex index;
  };

  struct GCCallbackPair {
    GCCallbackPair(v8::Isolate::GCCallback callback, GCType gc_type,
                   bool pass_isolate)
        : callback(callback), gc_type(gc_type), pass_isolate(pass_isolate) {}

    bool operator==(const GCCallbackPair& other) const {
      return other.callback == callback;
    }

    v8::Isolate::GCCallback callback;
    GCType gc_type;
    bool pass_isolate;
  };

  typedef String* (*ExternalStringTableUpdaterCallback)(Heap* heap,
                                                        Object** pointer);

  static const int kInitialStringTableSize = 2048;
  static const int kInitialEvalCacheSize = 64;
  static const int kInitialNumberStringCacheSize = 256;

  static const int kRememberedUnmappedPages = 128;

  static const StringTypeTable string_type_table[];
  static const ConstantStringTable constant_string_table[];
  static const StructTable struct_table[];

  static const int kYoungSurvivalRateHighThreshold = 90;
  static const int kYoungSurvivalRateAllowedDeviation = 15;
  static const int kOldSurvivalRateLowThreshold = 10;

  static const int kMaxMarkCompactsInIdleRound = 7;
  static const int kIdleScavengeThreshold = 5;

  static const int kInitialFeedbackCapacity = 256;

  Heap();

  static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
      Heap* heap, Object** pointer);

  static void ScavengeStoreBufferCallback(Heap* heap, MemoryChunk* page,
                                          StoreBufferEvent event);

  // Selects the proper allocation space based on the pretenuring decision.
  static AllocationSpace SelectSpace(PretenureFlag pretenure) {
    return (pretenure == TENURED) ? OLD_SPACE : NEW_SPACE;
  }

#define ROOT_ACCESSOR(type, name, camel_name) \
  inline void set_##name(type* value);
  ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

  StoreBuffer* store_buffer() { return &store_buffer_; }

  void set_current_gc_flags(int flags) {
    current_gc_flags_ = flags;
    DCHECK(!ShouldFinalizeIncrementalMarking() ||
           !ShouldAbortIncrementalMarking());
  }

  inline bool ShouldReduceMemory() const {
    return current_gc_flags_ & kReduceMemoryFootprintMask;
  }

  inline bool ShouldAbortIncrementalMarking() const {
    return current_gc_flags_ & kAbortIncrementalMarkingMask;
  }

  inline bool ShouldFinalizeIncrementalMarking() const {
    return current_gc_flags_ & kFinalizeIncrementalMarkingMask;
  }
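
  // Illustrative sketch of the predicates above: the GC flags form a bitmask,
  // so several can be combined, and set_current_gc_flags() asserts that
  // finalizing and aborting incremental marking are never requested together.
  //
  //   set_current_gc_flags(kReduceMemoryFootprintMask);
  //   DCHECK(ShouldReduceMemory());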

  void PreprocessStackTraces();

  // Checks whether a global GC is necessary.
  GarbageCollector SelectGarbageCollector(AllocationSpace space,
                                          const char** reason);

  // Make sure there is a filler value behind the top of the new space
  // so that the GC does not confuse some uninitialized/stale memory
  // with the allocation memento of the object at the top.
  void EnsureFillerObjectAtTop();

  // Ensure that we have swept all spaces in such a way that we can iterate
  // over all objects.  May cause a GC.
  void MakeHeapIterable();

  // Performs a garbage collection operation.
  // Returns whether there is a chance that another major GC could
  // collect more garbage.
  bool CollectGarbage(
      GarbageCollector collector, const char* gc_reason,
      const char* collector_reason,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

  // Performs a garbage collection.
  // Returns whether there is a chance that another major GC could
  // collect more garbage.
  bool PerformGarbageCollection(
      GarbageCollector collector,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

  inline void UpdateOldSpaceLimits();

  // Initializes a JSObject based on its map.
  void InitializeJSObjectFromMap(JSObject* obj, FixedArray* properties,
                                 Map* map);

  // Initializes JSObject body starting at given offset.
  void InitializeJSObjectBody(JSObject* obj, Map* map, int start_offset);

  void InitializeAllocationMemento(AllocationMemento* memento,
                                   AllocationSite* allocation_site);

  bool CreateInitialMaps();
  void CreateInitialObjects();

  // These two Create*EntryStub functions are here and forced to not be inlined
  // because of a gcc-4.4 bug that assigns wrong vtable entries.
  NO_INLINE(void CreateJSEntryStub());
  NO_INLINE(void CreateJSConstructEntryStub());

  void CreateFixedStubs();

  HeapObject* DoubleAlignForDeserialization(HeapObject* object, int size);

  // Commits from space if it is uncommitted.
  void EnsureFromSpaceIsCommitted();

  // Uncommit unused semi space.
  bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }

  // Fill in bogus values in from space.
  void ZapFromSpace();

  // Deopts all code that contains allocation instructions which are tenured or
  // not tenured. Moreover, it clears the pretenuring allocation site
  // statistics.
  void ResetAllAllocationSitesDependentCode(PretenureFlag flag);

  // Evaluates local pretenuring for the old space and calls
  // ResetAllTenuredAllocationSitesDependentCode if too many objects died in
  // the old space.
  void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc);

  // Record statistics before and after garbage collection.
  void ReportStatisticsBeforeGC();
  void ReportStatisticsAfterGC();

  // Creates and installs the full-sized number string cache.
  int FullSizeNumberStringCacheLength();
  // Flush the number to string cache.
  void FlushNumberStringCache();

  // TODO(hpayer): Allocation site pretenuring may make this method obsolete.
  // Re-visit incremental marking heuristics.
  bool IsHighSurvivalRate() { return high_survival_rate_period_length_ > 0; }

  void ConfigureInitialOldGenerationSize();

  bool HasLowYoungGenerationAllocationRate();
  bool HasLowOldGenerationAllocationRate();
  double YoungGenerationMutatorUtilization();
  double OldGenerationMutatorUtilization();

  void ReduceNewSpaceSize();

  bool TryFinalizeIdleIncrementalMarking(
      double idle_time_in_ms, size_t size_of_objects,
      size_t mark_compact_speed_in_bytes_per_ms);

  GCIdleTimeHeapState ComputeHeapState();

  bool PerformIdleTimeAction(GCIdleTimeAction action,
                             GCIdleTimeHeapState heap_state,
                             double deadline_in_ms);

  void IdleNotificationEpilogue(GCIdleTimeAction action,
                                GCIdleTimeHeapState heap_state, double start_ms,
                                double deadline_in_ms);

  inline void UpdateAllocationsHash(HeapObject* object);
  inline void UpdateAllocationsHash(uint32_t value);
  void PrintAlloctionsHash();

  void AddToRingBuffer(const char* string);
  void GetFromRingBuffer(char* buffer);

  void CompactRetainedMaps(ArrayList* retained_maps);

  // Attempt to over-approximate the weak closure by marking object groups and
  // implicit references from global handles, but don't atomically complete
  // marking. If we continue to mark incrementally, we might have marked
  // objects that die later.
  void FinalizeIncrementalMarking(const char* gc_reason);

  // Returns the timer used for a given GC type.
  // - GCScavenger: young generation GC
  // - GCCompactor: full GC
  // - GCFinalizeMC: finalization of incremental full GC
  // - GCFinalizeMCReduceMemory: finalization of incremental full GC with
  // memory reduction
  HistogramTimer* GCTypeTimer(GarbageCollector collector);

  // ===========================================================================
  // Pretenuring. ==============================================================
  // ===========================================================================

  // Pretenuring decisions are made based on feedback collected during new space
  // evacuation. Note that between feedback collection and calling this method
  // objects in old space must not move.
  void ProcessPretenuringFeedback();

  // ===========================================================================
  // Actual GC. ================================================================
  // ===========================================================================

  // Code that should be run before and after each GC.  Includes some
  // reporting/verification activities when compiled with DEBUG set.
  void GarbageCollectionPrologue();
  void GarbageCollectionEpilogue();

  // Performs a major collection in the whole heap.
  void MarkCompact();

  // Code to be run before and after mark-compact.
  void MarkCompactPrologue();
  void MarkCompactEpilogue();

  // Performs a minor collection in the new generation.
  void Scavenge();

  Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);

  void UpdateNewSpaceReferencesInExternalStringTable(
      ExternalStringTableUpdaterCallback updater_func);

  void UpdateReferencesInExternalStringTable(
      ExternalStringTableUpdaterCallback updater_func);

  void ProcessAllWeakReferences(WeakObjectRetainer* retainer);
  void ProcessYoungWeakReferences(WeakObjectRetainer* retainer);
  void ProcessNativeContexts(WeakObjectRetainer* retainer);
  void ProcessAllocationSites(WeakObjectRetainer* retainer);

  // ===========================================================================
  // GC statistics. ============================================================
  // ===========================================================================

  inline intptr_t OldGenerationSpaceAvailable() {
    return old_generation_allocation_limit_ - PromotedTotalSize();
  }

  // Returns maximum GC pause.
  double get_max_gc_pause() { return max_gc_pause_; }

  // Returns maximum size of objects alive after GC.
  intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }

  // Returns minimal interval between two subsequent collections.
  double get_min_in_mutator() { return min_in_mutator_; }

  // Update GC statistics that are tracked on the Heap.
  void UpdateCumulativeGCStatistics(double duration, double spent_in_mutator,
                                    double marking_time);

  bool MaximumSizeScavenge() { return maximum_size_scavenges_ > 0; }

  // ===========================================================================
  // Growing strategy. =========================================================
  // ===========================================================================

  // Decrease the allocation limit if the new limit based on the given
  // parameters is lower than the current limit.
  void DampenOldGenerationAllocationLimit(intptr_t old_gen_size,
                                          double gc_speed,
                                          double mutator_speed);

  // Calculates the allocation limit based on a given growing factor and a
  // given old generation size.
  intptr_t CalculateOldGenerationAllocationLimit(double factor,
                                                 intptr_t old_gen_size);

  // Sets the allocation limit to trigger the next full garbage collection.
  void SetOldGenerationAllocationLimit(intptr_t old_gen_size, double gc_speed,
                                       double mutator_speed);

  // ===========================================================================
  // Idle notification. ========================================================
  // ===========================================================================

  bool RecentIdleNotificationHappened();
  void ScheduleIdleScavengeIfNeeded(int bytes_allocated);

  // ===========================================================================
  // HeapIterator helpers. =====================================================
  // ===========================================================================

  void heap_iterator_start() { heap_iterator_depth_++; }

  void heap_iterator_end() { heap_iterator_depth_--; }

  bool in_heap_iterator() { return heap_iterator_depth_ > 0; }

  // ===========================================================================
  // Allocation methods. =======================================================
  // ===========================================================================

  // Returns a deep copy of the JavaScript object.
  // Properties and elements are copied too.
  // Optionally takes an AllocationSite to be appended in an AllocationMemento.
  MUST_USE_RESULT AllocationResult CopyJSObject(JSObject* source,
                                                AllocationSite* site = NULL);

  // Allocates a JS Map in the heap.
  MUST_USE_RESULT AllocationResult
  AllocateMap(InstanceType instance_type, int instance_size,
              ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND);

  // Allocates and initializes a new JavaScript object based on a
  // constructor.
  // If allocation_site is non-null, then a memento is emitted after the object
  // that points to the site.
  MUST_USE_RESULT AllocationResult AllocateJSObject(
      JSFunction* constructor, PretenureFlag pretenure = NOT_TENURED,
      AllocationSite* allocation_site = NULL);

  // Allocates and initializes a new JavaScript object based on a map.
  // Passing an allocation site means that a memento will be created that
  // points to the site.
  MUST_USE_RESULT AllocationResult
  AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure = NOT_TENURED,
                          AllocationSite* allocation_site = NULL);

  // Allocates a HeapNumber from value.
  MUST_USE_RESULT AllocationResult
  AllocateHeapNumber(double value, MutableMode mode = IMMUTABLE,
                     PretenureFlag pretenure = NOT_TENURED);

// Allocates SIMD values from the given lane values.
#define SIMD_ALLOCATE_DECLARATION(TYPE, Type, type, lane_count, lane_type) \
  AllocationResult Allocate##Type(lane_type lanes[lane_count],             \
                                  PretenureFlag pretenure = NOT_TENURED);
  SIMD128_TYPES(SIMD_ALLOCATE_DECLARATION)
#undef SIMD_ALLOCATE_DECLARATION

  // Allocates a byte array of the specified length
  MUST_USE_RESULT AllocationResult
  AllocateByteArray(int length, PretenureFlag pretenure = NOT_TENURED);

  // Allocates a bytecode array with given contents.
  MUST_USE_RESULT AllocationResult
  AllocateBytecodeArray(int length, const byte* raw_bytecodes, int frame_size,
                        int parameter_count, FixedArray* constant_pool);

  // Copy the code and scope info part of the code object, but insert
  // the provided data as the relocation information.
  MUST_USE_RESULT AllocationResult CopyCode(Code* code,
                                            Vector<byte> reloc_info);

  MUST_USE_RESULT AllocationResult CopyCode(Code* code);

  // Allocates a fixed array initialized with undefined values
  MUST_USE_RESULT AllocationResult
  AllocateFixedArray(int length, PretenureFlag pretenure = NOT_TENURED);

  // Allocate an uninitialized object.  The memory is non-executable if the
  // hardware and OS allow.  This is the single choke-point for allocations
  // performed by the runtime and should not be bypassed (to extend this to
  // inlined allocations, use the Heap::DisableInlineAllocation() support).
  MUST_USE_RESULT inline AllocationResult AllocateRaw(
      int size_in_bytes, AllocationSpace space,
      AllocationAlignment alignment = kWordAligned);
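
  // Usage sketch for the choke point above (illustrative; assumes
  // AllocationResult::To() extracts the object on success, which is how the
  // result type is consumed throughout the allocation paths):
  //
  //   AllocationResult allocation = AllocateRaw(size_in_bytes, OLD_SPACE);
  //   HeapObject* result = nullptr;
  //   if (!allocation.To(&result)) return allocation;  // retry or fail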

  // Allocates a heap object based on the map.
  MUST_USE_RESULT AllocationResult
      Allocate(Map* map, AllocationSpace space,
               AllocationSite* allocation_site = NULL);

  // Allocates a partial map for bootstrapping.
  MUST_USE_RESULT AllocationResult
      AllocatePartialMap(InstanceType instance_type, int instance_size);

  // Allocate a block of memory in the given space (filled with a filler).
  // Used as a fall-back for generated code when the space is full.
  MUST_USE_RESULT AllocationResult
      AllocateFillerObject(int size, bool double_align, AllocationSpace space);

  // Allocate an uninitialized fixed array.
  MUST_USE_RESULT AllocationResult
      AllocateRawFixedArray(int length, PretenureFlag pretenure);

  // Allocate an uninitialized fixed double array.
  MUST_USE_RESULT AllocationResult
      AllocateRawFixedDoubleArray(int length, PretenureFlag pretenure);

  // Allocate an initialized fixed array with the given filler value.
  MUST_USE_RESULT AllocationResult
      AllocateFixedArrayWithFiller(int length, PretenureFlag pretenure,
                                   Object* filler);

  // Allocates and partially initializes a String.  There are two String
  // encodings: one-byte and two-byte.  These functions allocate a string of
  // the given length and set its map and length fields.  The characters of
  // the string are uninitialized.
  MUST_USE_RESULT AllocationResult
      AllocateRawOneByteString(int length, PretenureFlag pretenure);
  MUST_USE_RESULT AllocationResult
      AllocateRawTwoByteString(int length, PretenureFlag pretenure);

  // Allocates an internalized string in old space based on the character
  // stream.
  MUST_USE_RESULT inline AllocationResult AllocateInternalizedStringFromUtf8(
      Vector<const char> str, int chars, uint32_t hash_field);

  MUST_USE_RESULT inline AllocationResult AllocateOneByteInternalizedString(
      Vector<const uint8_t> str, uint32_t hash_field);

  MUST_USE_RESULT inline AllocationResult AllocateTwoByteInternalizedString(
      Vector<const uc16> str, uint32_t hash_field);

  template <bool is_one_byte, typename T>
  MUST_USE_RESULT AllocationResult
      AllocateInternalizedStringImpl(T t, int chars, uint32_t hash_field);

  template <typename T>
  MUST_USE_RESULT inline AllocationResult AllocateInternalizedStringImpl(
      T t, int chars, uint32_t hash_field);

  // Allocates an uninitialized fixed array. It must be filled by the caller.
  MUST_USE_RESULT AllocationResult AllocateUninitializedFixedArray(int length);

  // Make a copy of src and return it.
  MUST_USE_RESULT inline AllocationResult CopyFixedArray(FixedArray* src);

  // Make a copy of src, also grow the copy, and return the copy.
  MUST_USE_RESULT AllocationResult
  CopyFixedArrayAndGrow(FixedArray* src, int grow_by, PretenureFlag pretenure);

  // Make a copy of src, set the map, and return the copy.
  MUST_USE_RESULT AllocationResult
      CopyFixedArrayWithMap(FixedArray* src, Map* map);

  // Make a copy of src and return it.
  MUST_USE_RESULT inline AllocationResult CopyFixedDoubleArray(
      FixedDoubleArray* src);

  // Computes a single character string where the character has the given code.
  // A cache is used for one-byte (Latin1) codes.
  MUST_USE_RESULT AllocationResult
      LookupSingleCharacterStringFromCode(uint16_t code);

  // Allocate a symbol in old space.
  MUST_USE_RESULT AllocationResult AllocateSymbol();

  // Allocates an external array of the specified length and type.
  MUST_USE_RESULT AllocationResult AllocateFixedTypedArrayWithExternalPointer(
      int length, ExternalArrayType array_type, void* external_pointer,
      PretenureFlag pretenure);

  // Allocates a fixed typed array of the specified length and type.
  MUST_USE_RESULT AllocationResult
  AllocateFixedTypedArray(int length, ExternalArrayType array_type,
                          bool initialize, PretenureFlag pretenure);

  // Make a copy of src and return it.
  MUST_USE_RESULT AllocationResult CopyAndTenureFixedCOWArray(FixedArray* src);

  // Make a copy of src, set the map, and return the copy.
  MUST_USE_RESULT AllocationResult
      CopyFixedDoubleArrayWithMap(FixedDoubleArray* src, Map* map);

  // Allocates a fixed double array with uninitialized values.
  MUST_USE_RESULT AllocationResult AllocateUninitializedFixedDoubleArray(
      int length, PretenureFlag pretenure = NOT_TENURED);

  // Allocate empty fixed array.
  MUST_USE_RESULT AllocationResult AllocateEmptyFixedArray();

  // Allocate empty fixed typed array of given type.
  MUST_USE_RESULT AllocationResult
      AllocateEmptyFixedTypedArray(ExternalArrayType array_type);

  // Allocate a tenured simple cell.
  MUST_USE_RESULT AllocationResult AllocateCell(Object* value);

  // Allocate a tenured JS global property cell initialized with the hole.
  MUST_USE_RESULT AllocationResult AllocatePropertyCell();

  MUST_USE_RESULT AllocationResult AllocateWeakCell(HeapObject* value);

  MUST_USE_RESULT AllocationResult AllocateTransitionArray(int capacity);

  // Allocates a new utility object in the old generation.
  MUST_USE_RESULT AllocationResult AllocateStruct(InstanceType type);

  // Allocates a new foreign object.
  MUST_USE_RESULT AllocationResult
      AllocateForeign(Address address, PretenureFlag pretenure = NOT_TENURED);

  MUST_USE_RESULT AllocationResult
      AllocateCode(int object_size, bool immovable);

  MUST_USE_RESULT AllocationResult InternalizeStringWithKey(HashTableKey* key);

  MUST_USE_RESULT AllocationResult InternalizeString(String* str);

  // ===========================================================================

  void set_force_oom(bool value) { force_oom_ = value; }

  // The amount of external memory registered through the API kept alive
  // by global handles.
  int64_t amount_of_external_allocated_memory_;

  // Caches the amount of external memory registered at the last global gc.
  int64_t amount_of_external_allocated_memory_at_last_global_gc_;

  // This can be calculated directly from a pointer to the heap; however, it is
  // more expedient to get at the isolate directly from within Heap methods.
  Isolate* isolate_;

  Object* roots_[kRootListLength];

  size_t code_range_size_;
  int reserved_semispace_size_;
  int max_semi_space_size_;
  int initial_semispace_size_;
  int target_semispace_size_;
  intptr_t max_old_generation_size_;
  intptr_t initial_old_generation_size_;
  bool old_generation_size_configured_;
  intptr_t max_executable_size_;
  intptr_t maximum_committed_;

  // For keeping track of how much data has survived
  // scavenge since last new space expansion.
  int survived_since_last_expansion_;

  // ... and since the last scavenge.
  int survived_last_scavenge_;

  // This is not the depth of nested AlwaysAllocateScope's but rather a single
  // count, as scopes can be acquired from multiple tasks (read: threads).
  AtomicNumber<size_t> always_allocate_scope_count_;

  // For keeping track of context disposals.
  int contexts_disposed_;

  // The length of the retained_maps array at the time of context disposal.
  // This separates maps in the retained_maps array that were created before
  // and after context disposal.
  int number_of_disposed_maps_;

  int global_ic_age_;

  int scan_on_scavenge_pages_;

  NewSpace new_space_;
  OldSpace* old_space_;
  OldSpace* code_space_;
  MapSpace* map_space_;
  LargeObjectSpace* lo_space_;
  HeapState gc_state_;
  int gc_post_processing_depth_;
  Address new_space_top_after_last_gc_;

  // Returns the amount of external memory registered since last global gc.
  int64_t PromotedExternalMemorySize();

  // How many "runtime allocations" happened.
  uint32_t allocations_count_;

  // Running hash over allocations performed.
  uint32_t raw_allocations_hash_;

  // How many mark-sweep collections happened.
  unsigned int ms_count_;

  // How many GCs have happened.
  unsigned int gc_count_;

  // For post mortem debugging.
  int remembered_unmapped_pages_index_;
  Address remembered_unmapped_pages_[kRememberedUnmappedPages];

#ifdef DEBUG
  // If the --gc-interval flag is set to a positive value, this
  // variable holds the value indicating the number of allocations
  // remaining until the next failure and garbage collection.
  int allocation_timeout_;
#endif  // DEBUG

  // Limit that triggers a global GC on the next (normally caused) GC.  This
  // is checked when we have already decided to do a GC to help determine
  // which collector to invoke, before expanding a paged space in the old
  // generation and on every allocation in large object space.
  intptr_t old_generation_allocation_limit_;

  // Indicates that an allocation has failed in the old generation since the
  // last GC.
  bool old_gen_exhausted_;

  // Indicates that memory usage is more important than latency.
  // TODO(ulan): Merge it with memory reducer once chromium:490559 is fixed.
  bool optimize_for_memory_usage_;

  // Indicates that inline bump-pointer allocation has been globally disabled
  // for all spaces. This is used to disable allocations in generated code.
  bool inline_allocation_disabled_;

  // Weak list heads, threaded through the objects.
  // List heads are initialized lazily and contain the undefined_value at start.
  Object* native_contexts_list_;
  Object* allocation_sites_list_;

  // List of encountered weak collections (JSWeakMap and JSWeakSet) during
  // marking. It is initialized during marking, destroyed after marking and
  // contains Smi(0) while marking is not active.
  Object* encountered_weak_collections_;

  Object* encountered_weak_cells_;

  Object* encountered_transition_arrays_;

  StoreBufferRebuilder store_buffer_rebuilder_;

  List<GCCallbackPair> gc_epilogue_callbacks_;
  List<GCCallbackPair> gc_prologue_callbacks_;

  // Total RegExp code ever generated
  double total_regexp_code_generated_;

  int deferred_counters_[v8::Isolate::kUseCounterFeatureCount];

  GCTracer* tracer_;

  int high_survival_rate_period_length_;
  intptr_t promoted_objects_size_;
  double promotion_ratio_;
  double promotion_rate_;
  intptr_t semi_space_copied_object_size_;
  intptr_t previous_semi_space_copied_object_size_;
  double semi_space_copied_rate_;
  int nodes_died_in_new_space_;
  int nodes_copied_in_new_space_;
  int nodes_promoted_;

  // This is the pretenuring trigger for allocation sites that are in maybe
  // tenure state. When we switch to the maximum new space size we deoptimize
  // the code that belongs to the allocation site and derive the lifetime
  // of the allocation site.
  unsigned int maximum_size_scavenges_;
2301  
2302    // Maximum GC pause.
2303    double max_gc_pause_;
2304  
2305    // Total time spent in GC.
2306    double total_gc_time_ms_;
2307  
2308    // Maximum size of objects alive after GC.
2309    intptr_t max_alive_after_gc_;
2310  
2311    // Minimal interval between two subsequent collections.
2312    double min_in_mutator_;
2313  
2314    // Cumulative GC time spent in marking.
2315    double marking_time_;
2316  
2317    // Cumulative GC time spent in sweeping.
2318    double sweeping_time_;
2319  
2320    // Last time an idle notification happened.
2321    double last_idle_notification_time_;
2322  
2323    // Last time a garbage collection happened.
2324    double last_gc_time_;
2325  
2326    Scavenger* scavenge_collector_;
2327  
2328    MarkCompactCollector* mark_compact_collector_;
2329  
2330    StoreBuffer store_buffer_;
2331  
2332    IncrementalMarking* incremental_marking_;
2333  
2334    GCIdleTimeHandler* gc_idle_time_handler_;
2335  
2336    MemoryReducer* memory_reducer_;
2337  
2338    ObjectStats* object_stats_;
2339  
2340    ScavengeJob* scavenge_job_;
2341  
2342    InlineAllocationObserver* idle_scavenge_observer_;
2343  
2344    // These two counters are monotonically increasing and never reset.
2345    size_t full_codegen_bytes_generated_;
2346    size_t crankshaft_codegen_bytes_generated_;
2347  
2348    // This counter is increased before each GC and never reset.
2349    // To account for the bytes allocated since the last GC, use the
2350    // NewSpaceAllocationCounter() function.
2351    size_t new_space_allocation_counter_;
2352  
2353    // This counter is increased before each GC and never reset. To
2354    // account for the bytes allocated since the last GC, use the
2355    // OldGenerationAllocationCounter() function.
2356    size_t old_generation_allocation_counter_;
2357  
2358    // The size of objects in old generation after the last MarkCompact GC.
2359    size_t old_generation_size_at_last_gc_;
2360  
2361    // If the --deopt_every_n_garbage_collections flag is set to a positive value,
2362    // this variable holds the number of garbage collections since the last
2363    // deoptimization triggered by garbage collection.
2364    int gcs_since_last_deopt_;
2365  
2366    // The feedback storage is used to store allocation sites (keys) and how
2367    // often they have been visited (values) by finding a memento behind an
2368    // object. The storage is only alive temporarily during a GC. The invariant
2369    // is that all pointers in this map are already fixed, i.e., they do not
2370    // point to forwarding pointers.
2371    HashMap* global_pretenuring_feedback_;
2372  
2373    char trace_ring_buffer_[kTraceRingBufferSize];
2374    // If it's not full then the data is from 0 to ring_buffer_end_.  If it's
2375    // full then the data is from ring_buffer_end_ to the end of the buffer and
2376    // from 0 to ring_buffer_end_.
2377    bool ring_buffer_full_;
2378    size_t ring_buffer_end_;
2379  
2380    // Shared state read by the scavenge collector and set by ScavengeObject.
2381    PromotionQueue promotion_queue_;
2382  
2383    // Flag is set when the heap has been configured.  The heap can be repeatedly
2384    // configured through the API until it is set up.
2385    bool configured_;
2386  
2387    // Currently set GC flags that are respected by all GC components.
2388    int current_gc_flags_;
2389  
2390    // Currently set GC callback flags that are used to pass information between
2391    // the embedder and V8's GC.
2392    GCCallbackFlags current_gc_callback_flags_;
2393  
2394    ExternalStringTable external_string_table_;
2395  
2396    MemoryChunk* chunks_queued_for_free_;
2397  
2398    size_t concurrent_unmapping_tasks_active_;
2399  
2400    base::Semaphore pending_unmapping_tasks_semaphore_;
2401  
2402    base::Mutex relocation_mutex_;
2403  
2404    int gc_callbacks_depth_;
2405  
2406    bool deserialization_complete_;
2407  
2408    StrongRootsList* strong_roots_list_;
2409  
2410    ArrayBufferTracker* array_buffer_tracker_;
2411  
2412    // The depth of HeapIterator nestings.
2413    int heap_iterator_depth_;
2414  
2415    // Used for testing purposes.
2416    bool force_oom_;
2417  
2418    // Classes in "heap" can be friends.
2419    friend class AlwaysAllocateScope;
2420    friend class GCCallbacksScope;
2421    friend class GCTracer;
2422    friend class HeapIterator;
2423    friend class IdleScavengeObserver;
2424    friend class IncrementalMarking;
2425    friend class IteratePointersToFromSpaceVisitor;
2426    friend class MarkCompactCollector;
2427    friend class MarkCompactMarkingVisitor;
2428    friend class NewSpace;
2429    friend class ObjectStatsVisitor;
2430    friend class Page;
2431    friend class Scavenger;
2432    friend class StoreBuffer;
2433  
2434    // The allocator interface.
2435    friend class Factory;
2436  
2437    // The Isolate constructs us.
2438    friend class Isolate;
2439  
2440    // Used in cctest.
2441    friend class HeapTester;
2442  
2443    DISALLOW_COPY_AND_ASSIGN(Heap);
2444  };
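// Illustrative sketch (not part of the original header): the
// gc_prologue_callbacks_ and gc_epilogue_callbacks_ lists above are
// populated through the public embedder API. A hedged example, assuming the
// v8::Isolate::AddGCPrologueCallback signature of this era:
//
//   void OnGCStart(v8::Isolate* isolate, v8::GCType type,
//                  v8::GCCallbackFlags flags) {
//     // React to the start of a collection, e.g. record a timestamp.
//   }
//
//   isolate->AddGCPrologueCallback(OnGCStart, v8::kGCTypeAll);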
2445  
2446  
2447  class HeapStats {
2448   public:
2449    static const int kStartMarker = 0xDECADE00;
2450    static const int kEndMarker = 0xDECADE01;
2451  
2452    int* start_marker;                       //  0
2453    int* new_space_size;                     //  1
2454    int* new_space_capacity;                 //  2
2455    intptr_t* old_space_size;                //  3
2456    intptr_t* old_space_capacity;            //  4
2457    intptr_t* code_space_size;               //  5
2458    intptr_t* code_space_capacity;           //  6
2459    intptr_t* map_space_size;                //  7
2460    intptr_t* map_space_capacity;            //  8
2461    intptr_t* lo_space_size;                 //  9
2462    int* global_handle_count;                // 10
2463    int* weak_global_handle_count;           // 11
2464    int* pending_global_handle_count;        // 12
2465    int* near_death_global_handle_count;     // 13
2466    int* free_global_handle_count;           // 14
2467    intptr_t* memory_allocator_size;         // 15
2468    intptr_t* memory_allocator_capacity;     // 16
2469    int* objects_per_type;                   // 17
2470    int* size_per_type;                      // 18
2471    int* os_error;                           // 19
2472    char* last_few_messages;                 // 20
2473    char* js_stacktrace;                     // 21
2474    int* end_marker;                         // 22
2475  };
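// Illustrative sketch (not part of the original header): a post-mortem
// reporter wires each HeapStats slot to its own storage and asks the heap to
// fill it in. Heap::RecordStats is assumed to be declared earlier in this
// header and to write the markers, which lets a crash-dump reader verify that
// the block was written completely.
//
//   int start, end, ns_size, ns_capacity;  // ... one variable per slot.
//   HeapStats stats;
//   stats.start_marker = &start;           // Filled with kStartMarker.
//   stats.new_space_size = &ns_size;
//   stats.new_space_capacity = &ns_capacity;
//   // ... wire the remaining slots 3..21 ...
//   stats.end_marker = &end;               // Filled with kEndMarker.
//   heap->RecordStats(&stats, false);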
2476  
2477  
2478  class AlwaysAllocateScope {
2479   public:
2480    explicit inline AlwaysAllocateScope(Isolate* isolate);
2481    inline ~AlwaysAllocateScope();
2482  
2483   private:
2484    Heap* heap_;
2485  };
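// Illustrative sketch (not part of the original header): the scope is
// stack-allocated around code that must not observe allocation failures; its
// destructor restores normal behavior.
//
//   {
//     AlwaysAllocateScope always_allocate(isolate);
//     // Allocations here may grow the heap beyond its normal constraints.
//   }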
2486  
2487  
2488  // Visitor class to verify interior pointers in spaces that do not contain
2489  // or care about intergenerational references. All heap object pointers have to
2490  // point into the heap to a location that has a map pointer at its first word.
2491  // Caveat: Heap::Contains is an approximation because it can return true for
2492  // objects in a heap space but above the allocation pointer.
2493  class VerifyPointersVisitor : public ObjectVisitor {
2494   public:
2495    inline void VisitPointers(Object** start, Object** end) override;
2496  };
2497  
2498  
2499  // Verify that all objects are Smis.
2500  class VerifySmisVisitor : public ObjectVisitor {
2501   public:
2502    inline void VisitPointers(Object** start, Object** end) override;
2503  };
2504  
2505  
2506  // Space iterator for iterating over all spaces of the heap.  Returns each space
2507  // in turn, and null when it is done.
2508  class AllSpaces BASE_EMBEDDED {
2509   public:
2510    explicit AllSpaces(Heap* heap) : heap_(heap), counter_(FIRST_SPACE) {}
2511    Space* next();
2512  
2513   private:
2514    Heap* heap_;
2515    int counter_;
2516  };
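// Illustrative sketch (not part of the original header): the next()/null
// protocol shared by AllSpaces and the OldSpaces/PagedSpaces iterators
// below. Space::SizeOfObjects() is assumed from src/heap/spaces.h.
//
//   intptr_t TotalSizeOfObjects(Heap* heap) {
//     intptr_t total = 0;
//     AllSpaces spaces(heap);
//     for (Space* space = spaces.next(); space != NULL;
//          space = spaces.next()) {
//       total += space->SizeOfObjects();
//     }
//     return total;
//   }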
2517  
2518  
2519  // Space iterator for iterating over all old spaces of the heap: Old space
2520  // and code space.  Returns each space in turn, and null when it is done.
2521  class OldSpaces BASE_EMBEDDED {
2522   public:
2523    explicit OldSpaces(Heap* heap) : heap_(heap), counter_(OLD_SPACE) {}
2524    OldSpace* next();
2525  
2526   private:
2527    Heap* heap_;
2528    int counter_;
2529  };
2530  
2531  
2532  // Space iterator for iterating over all the paged spaces of the heap: old
2533  // space, code space and map space.  Returns
2534  // each space in turn, and null when it is done.
2535  class PagedSpaces BASE_EMBEDDED {
2536   public:
2537    explicit PagedSpaces(Heap* heap) : heap_(heap), counter_(OLD_SPACE) {}
2538    PagedSpace* next();
2539  
2540   private:
2541    Heap* heap_;
2542    int counter_;
2543  };
2544  
2545  
2546  // Space iterator for iterating over all spaces of the heap.
2547  // For each space an object iterator is provided. The deallocation of the
2548  // returned object iterators is handled by the space iterator.
2549  class SpaceIterator : public Malloced {
2550   public:
2551    explicit SpaceIterator(Heap* heap);
2552    virtual ~SpaceIterator();
2553  
2554    bool has_next();
2555    ObjectIterator* next();
2556  
2557   private:
2558    ObjectIterator* CreateIterator();
2559  
2560    Heap* heap_;
2561    int current_space_;         // from enum AllocationSpace.
2562    ObjectIterator* iterator_;  // object iterator for the current space.
2563  };
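// Illustrative sketch (not part of the original header) of the
// has_next()/next() protocol. ObjectIterator is assumed to expose
// next_object() as in src/heap/spaces.h of this era; the returned iterators
// are owned (and freed) by the SpaceIterator itself.
//
//   SpaceIterator it(heap);
//   while (it.has_next()) {
//     ObjectIterator* objects = it.next();
//     for (HeapObject* obj = objects->next_object(); obj != NULL;
//          obj = objects->next_object()) {
//       // ... inspect obj ...
//     }
//   }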
2564  
2565  
2566  // A HeapIterator provides iteration over the whole heap. It
2567  // aggregates the specific iterators for the different spaces, as
2568  // each of these can iterate over only one space.
2569  //
2570  // HeapIterator ensures there is no allocation during its lifetime
2571  // (using an embedded DisallowHeapAllocation instance).
2572  //
2573  // HeapIterator can skip free list nodes (that is, de-allocated heap
2574  // objects that still remain in the heap). As the implementation of free
2575  // node filtering uses GC marks, it can't be used during MS/MC GC
2576  // phases. Also, it is forbidden to interrupt iteration in this mode,
2577  // as this will leave heap objects marked (and thus, unusable).
2578  class HeapIterator BASE_EMBEDDED {
2579   public:
2580    enum HeapObjectsFiltering { kNoFiltering, kFilterUnreachable };
2581  
2582    explicit HeapIterator(Heap* heap,
2583                          HeapObjectsFiltering filtering = kNoFiltering);
2584    ~HeapIterator();
2585  
2586    HeapObject* next();
2587  
2588   private:
2589    struct MakeHeapIterableHelper {
2590      explicit MakeHeapIterableHelper(Heap* heap) { heap->MakeHeapIterable(); }
2591    };
2592  
2593    HeapObject* NextObject();
2594  
2595    // The following two fields need to be declared in this order. Initialization
2596    // order guarantees that we first make the heap iterable (which may involve
2597    // allocations) and only then lock it down by not allowing further
2598    // allocations.
2599    MakeHeapIterableHelper make_heap_iterable_helper_;
2600    DisallowHeapAllocation no_heap_allocation_;
2601  
2602    Heap* heap_;
2603    HeapObjectsFiltering filtering_;
2604    HeapObjectsFilter* filter_;
2605    // Space iterator for iterating all the spaces.
2606    SpaceIterator* space_iterator_;
2607    // Object iterator for the space currently being iterated.
2608    ObjectIterator* object_iterator_;
2609  };
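// Illustrative sketch (not part of the original header): counting all live
// objects; next() is assumed to return NULL once the whole heap has been
// visited.
//
//   int CountHeapObjects(Heap* heap) {
//     int count = 0;
//     HeapIterator it(heap);  // kNoFiltering by default.
//     for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
//       count++;
//     }
//     return count;
//   }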
2610  
2611  
2612  // Cache for mapping (map, property name) into field offset.
2613  // Cleared at startup and prior to mark sweep collection.
2614  class KeyedLookupCache {
2615   public:
2616    // Lookup field offset for (map, name). If absent, -1 is returned.
2617    int Lookup(Handle<Map> map, Handle<Name> name);
2618  
2619    // Update an element in the cache.
2620    void Update(Handle<Map> map, Handle<Name> name, int field_offset);
2621  
2622    // Clear the cache.
2623    void Clear();
2624  
2625    static const int kLength = 256;
2626    static const int kCapacityMask = kLength - 1;
2627    static const int kMapHashShift = 5;
2628    static const int kHashMask = -4;  // Zero the last two bits.
2629    static const int kEntriesPerBucket = 4;
2630    static const int kEntryLength = 2;
2631    static const int kMapIndex = 0;
2632    static const int kKeyIndex = 1;
2633    static const int kNotFound = -1;
2634  
2635    // kEntriesPerBucket should be a power of 2.
2636    STATIC_ASSERT((kEntriesPerBucket & (kEntriesPerBucket - 1)) == 0);
2637    STATIC_ASSERT(kEntriesPerBucket == -kHashMask);
2638  
2639   private:
2640    KeyedLookupCache() {
2641      for (int i = 0; i < kLength; ++i) {
2642        keys_[i].map = NULL;
2643        keys_[i].name = NULL;
2644        field_offsets_[i] = kNotFound;
2645      }
2646    }
2647  
2648    static inline int Hash(Handle<Map> map, Handle<Name> name);
2649  
2650    // Get the address of the keys and field_offsets arrays.  Used in
2651    // generated code to perform cache lookups.
2652    Address keys_address() { return reinterpret_cast<Address>(&keys_); }
2653  
2654    Address field_offsets_address() {
2655      return reinterpret_cast<Address>(&field_offsets_);
2656    }
2657  
2658    struct Key {
2659      Map* map;
2660      Name* name;
2661    };
2662  
2663    Key keys_[kLength];
2664    int field_offsets_[kLength];
2665  
2666    friend class ExternalReference;
2667    friend class Isolate;
2668    DISALLOW_COPY_AND_ASSIGN(KeyedLookupCache);
2669  };
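// Illustrative sketch (not part of the original header): a runtime slow path
// consults the cache first and backfills it on a miss. SlowFieldLookup is a
// hypothetical stand-in for the real property walk.
//
//   int GetFieldOffset(KeyedLookupCache* cache, Handle<Map> map,
//                      Handle<Name> name) {
//     int offset = cache->Lookup(map, name);
//     if (offset == KeyedLookupCache::kNotFound) {
//       offset = SlowFieldLookup(map, name);  // Hypothetical helper.
//       cache->Update(map, name, offset);
//     }
//     return offset;
//   }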
2670  
2671  
2672  // Cache for mapping (map, property name) into descriptor index.
2673  // The cache contains both positive and negative results.
2674  // A descriptor index equal to kNotFound means the property is absent.
2675  // Cleared at startup and prior to any gc.
2676  class DescriptorLookupCache {
2677   public:
2678    // Lookup descriptor index for (map, name).
2679    // If absent, kAbsent is returned.
2680    inline int Lookup(Map* source, Name* name);
2681  
2682    // Update an element in the cache.
2683    inline void Update(Map* source, Name* name, int result);
2684  
2685    // Clear the cache.
2686    void Clear();
2687  
2688    static const int kAbsent = -2;
2689  
2690   private:
2691    DescriptorLookupCache() {
2692      for (int i = 0; i < kLength; ++i) {
2693        keys_[i].source = NULL;
2694        keys_[i].name = NULL;
2695        results_[i] = kAbsent;
2696      }
2697    }
2698  
2699    static int Hash(Object* source, Name* name) {
2700      // Uses only lower 32 bits if pointers are larger.
2701      uint32_t source_hash =
2702          static_cast<uint32_t>(reinterpret_cast<uintptr_t>(source)) >>
2703          kPointerSizeLog2;
2704      uint32_t name_hash =
2705          static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name)) >>
2706          kPointerSizeLog2;
2707      return (source_hash ^ name_hash) % kLength;
2708    }
2709  
2710    static const int kLength = 64;
2711    struct Key {
2712      Map* source;
2713      Name* name;
2714    };
2715  
2716    Key keys_[kLength];
2717    int results_[kLength];
2718  
2719    friend class Isolate;
2720    DISALLOW_COPY_AND_ASSIGN(DescriptorLookupCache);
2721  };
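// Illustrative sketch (not part of the original header): kAbsent signals a
// cache miss, while a cached kNotFound is a remembered negative result, so
// repeated lookups of a missing property skip the descriptor walk.
// SearchDescriptors is a hypothetical stand-in for the real search.
//
//   int index = cache->Lookup(map, name);
//   if (index == DescriptorLookupCache::kAbsent) {
//     index = SearchDescriptors(map, name);  // May yield kNotFound.
//     cache->Update(map, name, index);
//   }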
2722  
2723  
2724  // Abstract base class for checking whether a weak object should be retained.
2725  class WeakObjectRetainer {
2726   public:
2727    virtual ~WeakObjectRetainer() {}
2728  
2729    // Return whether this object should be retained. If NULL is returned the
2730    // object has no references. Otherwise the address of the retained object
2731    // should be returned, as in some GC situations the object has been moved.
2732    virtual Object* RetainAs(Object* object) = 0;
2733  };
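// Illustrative sketch (not part of the original header): a retainer that
// keeps every list entry alive. Real retainers return NULL to drop an entry,
// or the new address if the object was moved by the collector.
//
//   class KeepAllRetainer : public WeakObjectRetainer {
//    public:
//     Object* RetainAs(Object* object) override { return object; }
//   };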
2734  
2735  
2736  #ifdef DEBUG
2737  // Helper class for tracing paths to a search target Object from all roots.
2738  // The TracePathFrom() method can be used to trace paths from a specific
2739  // object to the search target object.
2740  class PathTracer : public ObjectVisitor {
2741   public:
2742    enum WhatToFind {
2743      FIND_ALL,   // Will find all matches.
2744      FIND_FIRST  // Will stop the search after first match.
2745    };
2746  
2747    // Tags 0, 1, and 3 are used. Use 2 for marking visited HeapObject.
2748    static const int kMarkTag = 2;
2749  
2750    // For the WhatToFind arg, if FIND_FIRST is specified, tracing will stop
2751    // after the first match.  If FIND_ALL is specified, then tracing will be
2752    // done for all matches.
2753    PathTracer(Object* search_target, WhatToFind what_to_find,
2754               VisitMode visit_mode)
2755        : search_target_(search_target),
2756          found_target_(false),
2757          found_target_in_trace_(false),
2758          what_to_find_(what_to_find),
2759          visit_mode_(visit_mode),
2760          object_stack_(20),
2761          no_allocation() {}
2762  
2763    void VisitPointers(Object** start, Object** end) override;
2764  
2765    void Reset();
2766    void TracePathFrom(Object** root);
2767  
2768    bool found() const { return found_target_; }
2769  
2770    static Object* const kAnyGlobalObject;
2771  
2772   protected:
2773    class MarkVisitor;
2774    class UnmarkVisitor;
2775  
2776    void MarkRecursively(Object** p, MarkVisitor* mark_visitor);
2777    void UnmarkRecursively(Object** p, UnmarkVisitor* unmark_visitor);
2778    virtual void ProcessResults();
2779  
2780    Object* search_target_;
2781    bool found_target_;
2782    bool found_target_in_trace_;
2783    WhatToFind what_to_find_;
2784    VisitMode visit_mode_;
2785    List<Object*> object_stack_;
2786  
2787    DisallowHeapAllocation no_allocation;  // i.e. no gc allowed.
2788  
2789   private:
2790    DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer);
2791  };
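// Illustrative sketch (not part of the original header, DEBUG only): tracing
// retention paths from the roots to a suspect object. Heap::IterateRoots and
// the VISIT_ALL mode are assumed from elsewhere in the codebase.
//
//   PathTracer tracer(suspect, PathTracer::FIND_ALL, VISIT_ALL);
//   heap->IterateRoots(&tracer, VISIT_ALL);
//   if (tracer.found()) { /* paths were printed during the trace */ }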
2792  #endif  // DEBUG
2793  }  // namespace internal
2794  }  // namespace v8
2795  
2796  #endif  // V8_HEAP_HEAP_H_
2797