// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/objects-visiting.h"

#include "src/heap/mark-compact-inl.h"
#include "src/heap/objects-visiting-inl.h"

namespace v8 {
namespace internal {

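// Maps a heap object's instance type (and, for fixed-size objects, its
// instance size and field layout) to the id of the static visitor that
// iterates its body.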
StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(Map* map) {
  return GetVisitorId(map->instance_type(), map->instance_size(),
                      FLAG_unbox_double_fields && !map->HasFastPointerLayout());
}


StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
    int instance_type, int instance_size, bool has_unboxed_fields) {
  if (instance_type < FIRST_NONSTRING_TYPE) {
    switch (instance_type & kStringRepresentationMask) {
      case kSeqStringTag:
        if ((instance_type & kStringEncodingMask) == kOneByteStringTag) {
          return kVisitSeqOneByteString;
        } else {
          return kVisitSeqTwoByteString;
        }

      case kConsStringTag:
        if (IsShortcutCandidate(instance_type)) {
          return kVisitShortcutCandidate;
        } else {
          return kVisitConsString;
        }

      case kSlicedStringTag:
        return kVisitSlicedString;

      case kExternalStringTag:
        return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric,
                                   instance_size, has_unboxed_fields);
    }
    UNREACHABLE();
  }

  switch (instance_type) {
    case BYTE_ARRAY_TYPE:
      return kVisitByteArray;

    case BYTECODE_ARRAY_TYPE:
      return kVisitBytecodeArray;

    case FREE_SPACE_TYPE:
      return kVisitFreeSpace;

    case FIXED_ARRAY_TYPE:
      return kVisitFixedArray;

    case FIXED_DOUBLE_ARRAY_TYPE:
      return kVisitFixedDoubleArray;

    case ODDBALL_TYPE:
      return kVisitOddball;

    case MAP_TYPE:
      return kVisitMap;

    case CODE_TYPE:
      return kVisitCode;

    case CELL_TYPE:
      return kVisitCell;

    case PROPERTY_CELL_TYPE:
      return kVisitPropertyCell;

    case WEAK_CELL_TYPE:
      return kVisitWeakCell;

    case TRANSITION_ARRAY_TYPE:
      return kVisitTransitionArray;

    case JS_WEAK_MAP_TYPE:
    case JS_WEAK_SET_TYPE:
      return kVisitJSWeakCollection;

    case JS_REGEXP_TYPE:
      return kVisitJSRegExp;

    case SHARED_FUNCTION_INFO_TYPE:
      return kVisitSharedFunctionInfo;

    case JS_PROXY_TYPE:
      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 instance_size, has_unboxed_fields);

    case SYMBOL_TYPE:
      return kVisitSymbol;

    case JS_ARRAY_BUFFER_TYPE:
      return kVisitJSArrayBuffer;

    case JS_OBJECT_TYPE:
    case JS_ERROR_TYPE:
    case JS_ARGUMENTS_TYPE:
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
    case JS_GENERATOR_OBJECT_TYPE:
    case JS_FIXED_ARRAY_ITERATOR_TYPE:
    case JS_MODULE_NAMESPACE_TYPE:
    case JS_VALUE_TYPE:
    case JS_DATE_TYPE:
    case JS_ARRAY_TYPE:
    case JS_GLOBAL_PROXY_TYPE:
    case JS_GLOBAL_OBJECT_TYPE:
    case JS_MESSAGE_OBJECT_TYPE:
    case JS_TYPED_ARRAY_TYPE:
    case JS_DATA_VIEW_TYPE:
    case JS_SET_TYPE:
    case JS_MAP_TYPE:
    case JS_SET_ITERATOR_TYPE:
    case JS_MAP_ITERATOR_TYPE:
    case JS_STRING_ITERATOR_TYPE:

    case JS_TYPED_ARRAY_KEY_ITERATOR_TYPE:
    case JS_FAST_ARRAY_KEY_ITERATOR_TYPE:
    case JS_GENERIC_ARRAY_KEY_ITERATOR_TYPE:
    case JS_UINT8_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_INT8_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_UINT16_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_INT16_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_UINT32_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_INT32_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_FLOAT32_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_FLOAT64_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_UINT8_CLAMPED_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_FAST_SMI_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_FAST_HOLEY_SMI_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_FAST_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_FAST_HOLEY_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_FAST_DOUBLE_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_FAST_HOLEY_DOUBLE_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_GENERIC_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_UINT8_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_INT8_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_UINT16_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_INT16_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_UINT32_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_INT32_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_FLOAT32_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_FLOAT64_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_UINT8_CLAMPED_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_FAST_SMI_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_FAST_HOLEY_SMI_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_FAST_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_FAST_HOLEY_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_FAST_DOUBLE_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_FAST_HOLEY_DOUBLE_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_GENERIC_ARRAY_VALUE_ITERATOR_TYPE:

    case JS_PROMISE_TYPE:
    case JS_BOUND_FUNCTION_TYPE:
      return GetVisitorIdForSize(kVisitJSObject, kVisitJSObjectGeneric,
                                 instance_size, has_unboxed_fields);
    case JS_API_OBJECT_TYPE:
    case JS_SPECIAL_API_OBJECT_TYPE:
      return GetVisitorIdForSize(kVisitJSApiObject, kVisitJSApiObjectGeneric,
                                 instance_size, has_unboxed_fields);

    case JS_FUNCTION_TYPE:
      return kVisitJSFunction;

    case FILLER_TYPE:
      if (instance_size == kPointerSize) return kVisitDataObjectGeneric;
      // Fall through.
    case FOREIGN_TYPE:
    case HEAP_NUMBER_TYPE:
    case MUTABLE_HEAP_NUMBER_TYPE:
    case SIMD128_VALUE_TYPE:
      return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric,
                                 instance_size, has_unboxed_fields);

    case FIXED_UINT8_ARRAY_TYPE:
    case FIXED_INT8_ARRAY_TYPE:
    case FIXED_UINT16_ARRAY_TYPE:
    case FIXED_INT16_ARRAY_TYPE:
    case FIXED_UINT32_ARRAY_TYPE:
    case FIXED_INT32_ARRAY_TYPE:
    case FIXED_FLOAT32_ARRAY_TYPE:
    case FIXED_UINT8_CLAMPED_ARRAY_TYPE:
      return kVisitFixedTypedArray;

    case FIXED_FLOAT64_ARRAY_TYPE:
      return kVisitFixedFloat64Array;

#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE:
      STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
      if (instance_type == ALLOCATION_SITE_TYPE) {
        return kVisitAllocationSite;
      }

      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 instance_size, has_unboxed_fields);

    default:
      UNREACHABLE();
      return kVisitorIdCount;
  }
}


// We do not record weak slots during marking or scavenges. Instead we do it
// once when the mark-compact cycle completes. Note that the write barrier has
// no effect if we are already in the middle of a compacting mark-sweep cycle,
// so we have to record slots manually.
static bool MustRecordSlots(Heap* heap) {
  return heap->gc_state() == Heap::MARK_COMPACT &&
         heap->mark_compact_collector()->is_compacting();
}

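// Traits describing how a weak list is threaded through objects of type T:
// where the weak "next" link lives and how to visit live and phantom (dead)
// elements.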
template <class T>
struct WeakListVisitor;

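// Walks the weak list starting at |list|, dropping elements that the retainer
// no longer keeps alive, and returns the new list head.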
template <class T>
Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
  Object* undefined = heap->undefined_value();
  Object* head = undefined;
  T* tail = NULL;
  MarkCompactCollector* collector = heap->mark_compact_collector();
  bool record_slots = MustRecordSlots(heap);

  while (list != undefined) {
    // Check whether to keep the candidate in the list.
    T* candidate = reinterpret_cast<T*>(list);

    Object* retained = retainer->RetainAs(list);
    if (retained != NULL) {
      if (head == undefined) {
        // First element in the list.
        head = retained;
      } else {
        // Subsequent elements in the list.
        DCHECK(tail != NULL);
        WeakListVisitor<T>::SetWeakNext(tail, retained);
        if (record_slots) {
          Object** next_slot =
              HeapObject::RawField(tail, WeakListVisitor<T>::WeakNextOffset());
          collector->RecordSlot(tail, next_slot, retained);
        }
      }
      // The retained object is the new tail.
      DCHECK(!retained->IsUndefined(heap->isolate()));
      candidate = reinterpret_cast<T*>(retained);
      tail = candidate;

      // The tail is a live object; visit it.
      WeakListVisitor<T>::VisitLiveObject(heap, tail, retainer);

    } else {
      WeakListVisitor<T>::VisitPhantomObject(heap, candidate);
    }

    // Move to the next element in the list.
    list = WeakListVisitor<T>::WeakNext(candidate);
  }

  // Terminate the list if it has one or more elements.
  if (tail != NULL) WeakListVisitor<T>::SetWeakNext(tail, undefined);
  return head;
}

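// Unlinks every element of the list by resetting its weak "next" field to
// undefined.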
template <class T>
static void ClearWeakList(Heap* heap, Object* list) {
  Object* undefined = heap->undefined_value();
  while (list != undefined) {
    T* candidate = reinterpret_cast<T*>(list);
    list = WeakListVisitor<T>::WeakNext(candidate);
    WeakListVisitor<T>::SetWeakNext(candidate, undefined);
  }
}

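// JSFunctions are chained through their next_function_link field.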
template <>
struct WeakListVisitor<JSFunction> {
  static void SetWeakNext(JSFunction* function, Object* next) {
    function->set_next_function_link(next, UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object* WeakNext(JSFunction* function) {
    return function->next_function_link();
  }

  static int WeakNextOffset() { return JSFunction::kNextFunctionLinkOffset; }

  static void VisitLiveObject(Heap*, JSFunction*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, JSFunction*) {}
};


template <>
struct WeakListVisitor<Code> {
  static void SetWeakNext(Code* code, Object* next) {
    code->set_next_code_link(next, UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object* WeakNext(Code* code) { return code->next_code_link(); }

  static int WeakNextOffset() { return Code::kNextCodeLinkOffset; }

  static void VisitLiveObject(Heap*, Code*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, Code*) {}
};

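// Contexts are chained through Context::NEXT_CONTEXT_LINK; a live native
// context additionally owns weak lists of optimized functions and of
// optimized and deoptimized code.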
template <>
struct WeakListVisitor<Context> {
  static void SetWeakNext(Context* context, Object* next) {
    context->set(Context::NEXT_CONTEXT_LINK, next, UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object* WeakNext(Context* context) {
    return context->next_context_link();
  }

  static int WeakNextOffset() {
    return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK);
  }

  static void VisitLiveObject(Heap* heap, Context* context,
                              WeakObjectRetainer* retainer) {
    // Process the three weak lists linked off the context.
    DoWeakList<JSFunction>(heap, context, retainer,
                           Context::OPTIMIZED_FUNCTIONS_LIST);

    if (heap->gc_state() == Heap::MARK_COMPACT) {
      // Record the slots of the weak entries in the native context.
      MarkCompactCollector* collector = heap->mark_compact_collector();
      for (int idx = Context::FIRST_WEAK_SLOT;
           idx < Context::NATIVE_CONTEXT_SLOTS; ++idx) {
        Object** slot = Context::cast(context)->RawFieldOfElementAt(idx);
        collector->RecordSlot(context, slot, *slot);
      }
      // Code objects are always allocated in Code space; we do not have to
      // visit them during scavenges.
      DoWeakList<Code>(heap, context, retainer, Context::OPTIMIZED_CODE_LIST);
      DoWeakList<Code>(heap, context, retainer, Context::DEOPTIMIZED_CODE_LIST);
    }
  }

  template <class T>
  static void DoWeakList(Heap* heap, Context* context,
                         WeakObjectRetainer* retainer, int index) {
    // Visit the weak list, removing dead intermediate elements.
    Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer);

    // Update the list head.
    context->set(index, list_head, UPDATE_WRITE_BARRIER);

    if (MustRecordSlots(heap)) {
      // Record the updated slot if necessary.
      Object** head_slot =
          HeapObject::RawField(context, FixedArray::SizeFor(index));
      heap->mark_compact_collector()->RecordSlot(context, head_slot, list_head);
    }
  }

  static void VisitPhantomObject(Heap* heap, Context* context) {
    ClearWeakList<JSFunction>(heap,
                              context->get(Context::OPTIMIZED_FUNCTIONS_LIST));
    ClearWeakList<Code>(heap, context->get(Context::OPTIMIZED_CODE_LIST));
    ClearWeakList<Code>(heap, context->get(Context::DEOPTIMIZED_CODE_LIST));
  }
};

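// AllocationSites are chained through their weak_next field.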
template <>
struct WeakListVisitor<AllocationSite> {
  static void SetWeakNext(AllocationSite* obj, Object* next) {
    obj->set_weak_next(next, UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object* WeakNext(AllocationSite* obj) { return obj->weak_next(); }

  static int WeakNextOffset() { return AllocationSite::kWeakNextOffset; }

  static void VisitLiveObject(Heap*, AllocationSite*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, AllocationSite*) {}
};

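// Explicitly instantiate the weak list traversal for the list types that are
// used from other translation units.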
template Object* VisitWeakList<Context>(Heap* heap, Object* list,
                                        WeakObjectRetainer* retainer);

template Object* VisitWeakList<AllocationSite>(Heap* heap, Object* list,
                                               WeakObjectRetainer* retainer);
}  // namespace internal
}  // namespace v8