// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/objects-visiting.h"

#include "src/heap/heap-inl.h"
#include "src/heap/mark-compact-inl.h"
#include "src/heap/objects-visiting-inl.h"

namespace v8 {
namespace internal {


StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(Map* map) {
  return GetVisitorId(map->instance_type(), map->instance_size(),
                      FLAG_unbox_double_fields && !map->HasFastPointerLayout());
}


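// Maps an instance type (plus instance size and unboxed-fields information)
// to the id of the static visitor that knows how to iterate the body of an
// object with that shape.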
StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
    int instance_type, int instance_size, bool has_unboxed_fields) {
  if (instance_type < FIRST_NONSTRING_TYPE) {
    switch (instance_type & kStringRepresentationMask) {
      case kSeqStringTag:
        if ((instance_type & kStringEncodingMask) == kOneByteStringTag) {
          return kVisitSeqOneByteString;
        } else {
          return kVisitSeqTwoByteString;
        }

      case kConsStringTag:
        if (IsShortcutCandidate(instance_type)) {
          return kVisitShortcutCandidate;
        } else {
          return kVisitConsString;
        }

      case kSlicedStringTag:
        return kVisitSlicedString;

      case kExternalStringTag:
        return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric,
                                   instance_size, has_unboxed_fields);

      case kThinStringTag:
        return kVisitThinString;
    }
    UNREACHABLE();
  }
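  // All other types are dispatched on the exact instance type. For objects
  // with a fixed layout, GetVisitorIdForSize (declared in objects-visiting.h)
  // selects a size-specialized visitor id where possible and falls back to
  // the generic id otherwise.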
  switch (instance_type) {
    case BYTE_ARRAY_TYPE:
      return kVisitByteArray;

    case BYTECODE_ARRAY_TYPE:
      return kVisitBytecodeArray;

    case FREE_SPACE_TYPE:
      return kVisitFreeSpace;

    case FIXED_ARRAY_TYPE:
      return kVisitFixedArray;

    case FIXED_DOUBLE_ARRAY_TYPE:
      return kVisitFixedDoubleArray;

    case ODDBALL_TYPE:
      return kVisitOddball;

    case MAP_TYPE:
      return kVisitMap;

    case CODE_TYPE:
      return kVisitCode;

    case CELL_TYPE:
      return kVisitCell;

    case PROPERTY_CELL_TYPE:
      return kVisitPropertyCell;

    case WEAK_CELL_TYPE:
      return kVisitWeakCell;

    case TRANSITION_ARRAY_TYPE:
      return kVisitTransitionArray;

    case JS_WEAK_MAP_TYPE:
    case JS_WEAK_SET_TYPE:
      return kVisitJSWeakCollection;

    case JS_REGEXP_TYPE:
      return kVisitJSRegExp;

    case SHARED_FUNCTION_INFO_TYPE:
      return kVisitSharedFunctionInfo;

    case JS_PROXY_TYPE:
      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 instance_size, has_unboxed_fields);

    case SYMBOL_TYPE:
      return kVisitSymbol;

    case JS_ARRAY_BUFFER_TYPE:
      return kVisitJSArrayBuffer;

    case JS_OBJECT_TYPE:
    case JS_ERROR_TYPE:
    case JS_ARGUMENTS_TYPE:
    case JS_ASYNC_FROM_SYNC_ITERATOR_TYPE:
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
    case JS_GENERATOR_OBJECT_TYPE:
    case JS_MODULE_NAMESPACE_TYPE:
    case JS_VALUE_TYPE:
    case JS_DATE_TYPE:
    case JS_ARRAY_TYPE:
    case JS_GLOBAL_PROXY_TYPE:
    case JS_GLOBAL_OBJECT_TYPE:
    case JS_MESSAGE_OBJECT_TYPE:
    case JS_TYPED_ARRAY_TYPE:
    case JS_DATA_VIEW_TYPE:
    case JS_SET_TYPE:
    case JS_MAP_TYPE:
    case JS_SET_ITERATOR_TYPE:
    case JS_MAP_ITERATOR_TYPE:
    case JS_STRING_ITERATOR_TYPE:

    case JS_TYPED_ARRAY_KEY_ITERATOR_TYPE:
    case JS_FAST_ARRAY_KEY_ITERATOR_TYPE:
    case JS_GENERIC_ARRAY_KEY_ITERATOR_TYPE:
    case JS_UINT8_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_INT8_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_UINT16_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_INT16_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_UINT32_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_INT32_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_FLOAT32_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_FLOAT64_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_UINT8_CLAMPED_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_FAST_SMI_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_FAST_HOLEY_SMI_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_FAST_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_FAST_HOLEY_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_FAST_DOUBLE_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_FAST_HOLEY_DOUBLE_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_GENERIC_ARRAY_KEY_VALUE_ITERATOR_TYPE:
    case JS_UINT8_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_INT8_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_UINT16_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_INT16_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_UINT32_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_INT32_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_FLOAT32_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_FLOAT64_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_UINT8_CLAMPED_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_FAST_SMI_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_FAST_HOLEY_SMI_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_FAST_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_FAST_HOLEY_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_FAST_DOUBLE_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_FAST_HOLEY_DOUBLE_ARRAY_VALUE_ITERATOR_TYPE:
    case JS_GENERIC_ARRAY_VALUE_ITERATOR_TYPE:

    case JS_PROMISE_CAPABILITY_TYPE:
    case JS_PROMISE_TYPE:
    case JS_BOUND_FUNCTION_TYPE:
      return GetVisitorIdForSize(kVisitJSObject, kVisitJSObjectGeneric,
                                 instance_size, has_unboxed_fields);
    case JS_API_OBJECT_TYPE:
    case JS_SPECIAL_API_OBJECT_TYPE:
      return GetVisitorIdForSize(kVisitJSApiObject, kVisitJSApiObjectGeneric,
                                 instance_size, has_unboxed_fields);

    case JS_FUNCTION_TYPE:
      return kVisitJSFunction;

    case FILLER_TYPE:
      if (instance_size == kPointerSize) return kVisitDataObjectGeneric;
    // Fall through.
    case FOREIGN_TYPE:
    case HEAP_NUMBER_TYPE:
    case MUTABLE_HEAP_NUMBER_TYPE:
      return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric,
                                 instance_size, has_unboxed_fields);

    case FIXED_UINT8_ARRAY_TYPE:
    case FIXED_INT8_ARRAY_TYPE:
    case FIXED_UINT16_ARRAY_TYPE:
    case FIXED_INT16_ARRAY_TYPE:
    case FIXED_UINT32_ARRAY_TYPE:
    case FIXED_INT32_ARRAY_TYPE:
    case FIXED_FLOAT32_ARRAY_TYPE:
    case FIXED_UINT8_CLAMPED_ARRAY_TYPE:
      return kVisitFixedTypedArray;

    case FIXED_FLOAT64_ARRAY_TYPE:
      return kVisitFixedFloat64Array;

#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE:
      STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
      if (instance_type == ALLOCATION_SITE_TYPE) {
        return kVisitAllocationSite;
      }

      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 instance_size, has_unboxed_fields);

    default:
      UNREACHABLE();
      return kVisitorIdCount;
  }
}
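
// Illustrative dispatch through the visitor id; |visitor_callbacks| is a
// hypothetical table name (the real dispatch goes through the visitor tables
// set up in objects-visiting-inl.h):
//   visitor_callbacks[StaticVisitorBase::GetVisitorId(object->map())](
//       object->map(), object);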


// We do not record weak slots during marking or scavenges. Instead we do it
// once when we complete the mark-compact cycle. Note that the write barrier
// has no effect if we are already in the middle of a compacting mark-sweep
// cycle, so we have to record slots manually.
static bool MustRecordSlots(Heap* heap) {
  return heap->gc_state() == Heap::MARK_COMPACT &&
         heap->mark_compact_collector()->is_compacting();
}


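// Trait describing how a weak list is threaded through objects of type T.
// Each specialization below supplies:
//   SetWeakNext/WeakNext/WeakNextOffset - access to T's weak "next" field;
//   VisitLiveObject/VisitPhantomObject  - hooks for retained and dropped
//                                         elements, respectively.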
template <class T>
struct WeakListVisitor;


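// Walks the weak list headed by |list|, asking |retainer| which elements are
// still alive. Live elements are relinked past dead ones and visited; dead
// elements are handed to VisitPhantomObject. Returns the new list head.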
template <class T>
Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
  Object* undefined = heap->undefined_value();
  Object* head = undefined;
  T* tail = NULL;
  MarkCompactCollector* collector = heap->mark_compact_collector();
  bool record_slots = MustRecordSlots(heap);

  while (list != undefined) {
    // Check whether to keep the candidate in the list.
    T* candidate = reinterpret_cast<T*>(list);

    Object* retained = retainer->RetainAs(list);
    if (retained != NULL) {
      if (head == undefined) {
        // First element in the list.
        head = retained;
      } else {
        // Subsequent elements in the list.
        DCHECK(tail != NULL);
        WeakListVisitor<T>::SetWeakNext(tail, retained);
        if (record_slots) {
          Object** next_slot =
              HeapObject::RawField(tail, WeakListVisitor<T>::WeakNextOffset());
          collector->RecordSlot(tail, next_slot, retained);
        }
      }
      // The retained object is the new tail.
      DCHECK(!retained->IsUndefined(heap->isolate()));
      candidate = reinterpret_cast<T*>(retained);
      tail = candidate;

      // The tail is a live object; visit it.
      WeakListVisitor<T>::VisitLiveObject(heap, tail, retainer);

    } else {
      WeakListVisitor<T>::VisitPhantomObject(heap, candidate);
    }

    // Move to the next element in the list.
    list = WeakListVisitor<T>::WeakNext(candidate);
  }

  // Terminate the list if it contains one or more elements.
  if (tail != NULL) WeakListVisitor<T>::SetWeakNext(tail, undefined);
  return head;
}
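
// Typical call site (illustrative; the actual callers live in heap.cc):
//   heap->set_native_contexts_list(
//       VisitWeakList<Context>(heap, heap->native_contexts_list(), retainer));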


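// Unlinks every element of a weak list by resetting its weak-next field to
// undefined, without visiting the elements. Used when the owner of the list
// head is itself dead.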
template <class T>
static void ClearWeakList(Heap* heap, Object* list) {
  Object* undefined = heap->undefined_value();
  while (list != undefined) {
    T* candidate = reinterpret_cast<T*>(list);
    list = WeakListVisitor<T>::WeakNext(candidate);
    WeakListVisitor<T>::SetWeakNext(candidate, undefined);
  }
}


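// JSFunctions are linked through JSFunction::next_function_link; this list
// backs a context's OPTIMIZED_FUNCTIONS_LIST.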
template <>
struct WeakListVisitor<JSFunction> {
  static void SetWeakNext(JSFunction* function, Object* next) {
    function->set_next_function_link(next, UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object* WeakNext(JSFunction* function) {
    return function->next_function_link();
  }

  static int WeakNextOffset() { return JSFunction::kNextFunctionLinkOffset; }

  static void VisitLiveObject(Heap*, JSFunction*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, JSFunction*) {}
};


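// Code objects are linked through Code::next_code_link; these lists back a
// context's OPTIMIZED_CODE_LIST and DEOPTIMIZED_CODE_LIST.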
template <>
struct WeakListVisitor<Code> {
  static void SetWeakNext(Code* code, Object* next) {
    code->set_next_code_link(next, UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object* WeakNext(Code* code) { return code->next_code_link(); }

  static int WeakNextOffset() { return Code::kNextCodeLinkOffset; }

  static void VisitLiveObject(Heap*, Code*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, Code*) {}
};


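// Native contexts are linked through Context::NEXT_CONTEXT_LINK. A live
// context additionally owns the weak lists that VisitLiveObject processes
// below.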
template <>
struct WeakListVisitor<Context> {
  static void SetWeakNext(Context* context, Object* next) {
    context->set(Context::NEXT_CONTEXT_LINK, next, UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object* WeakNext(Context* context) {
    return context->next_context_link();
  }

  static int WeakNextOffset() {
    return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK);
  }

  static void VisitLiveObject(Heap* heap, Context* context,
                              WeakObjectRetainer* retainer) {
    // Process the three weak lists linked off the context.
    DoWeakList<JSFunction>(heap, context, retainer,
                           Context::OPTIMIZED_FUNCTIONS_LIST);

    if (heap->gc_state() == Heap::MARK_COMPACT) {
      // Record the slots of the weak entries in the native context.
      MarkCompactCollector* collector = heap->mark_compact_collector();
      for (int idx = Context::FIRST_WEAK_SLOT;
           idx < Context::NATIVE_CONTEXT_SLOTS; ++idx) {
        Object** slot = Context::cast(context)->RawFieldOfElementAt(idx);
        collector->RecordSlot(context, slot, *slot);
      }
      // Code objects are always allocated in Code space; we do not have to
      // visit them during scavenges.
      DoWeakList<Code>(heap, context, retainer, Context::OPTIMIZED_CODE_LIST);
      DoWeakList<Code>(heap, context, retainer, Context::DEOPTIMIZED_CODE_LIST);
    }
  }

  template <class T>
  static void DoWeakList(Heap* heap, Context* context,
                         WeakObjectRetainer* retainer, int index) {
    // Visit the weak list, removing dead intermediate elements.
    Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer);

    // Update the list head.
    context->set(index, list_head, UPDATE_WRITE_BARRIER);

    if (MustRecordSlots(heap)) {
      // Record the updated slot if necessary.
      Object** head_slot =
          HeapObject::RawField(context, FixedArray::SizeFor(index));
      heap->mark_compact_collector()->RecordSlot(context, head_slot, list_head);
    }
  }

  static void VisitPhantomObject(Heap* heap, Context* context) {
    ClearWeakList<JSFunction>(heap,
                              context->get(Context::OPTIMIZED_FUNCTIONS_LIST));
    ClearWeakList<Code>(heap, context->get(Context::OPTIMIZED_CODE_LIST));
    ClearWeakList<Code>(heap, context->get(Context::DEOPTIMIZED_CODE_LIST));
  }
};


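// AllocationSites are linked through their weak_next field into the heap's
// list of allocation sites.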
template <>
struct WeakListVisitor<AllocationSite> {
  static void SetWeakNext(AllocationSite* obj, Object* next) {
    obj->set_weak_next(next, UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object* WeakNext(AllocationSite* obj) { return obj->weak_next(); }

  static int WeakNextOffset() { return AllocationSite::kWeakNextOffset; }

  static void VisitLiveObject(Heap*, AllocationSite*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, AllocationSite*) {}
};


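// Explicit instantiations for the weak list element types that are traversed
// from outside this file (see the weak object processing in heap.cc).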
template Object* VisitWeakList<Context>(Heap* heap, Object* list,
                                        WeakObjectRetainer* retainer);

template Object* VisitWeakList<AllocationSite>(Heap* heap, Object* list,
                                               WeakObjectRetainer* retainer);
}  // namespace internal
}  // namespace v8