// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/objects-visiting.h"

#include "src/heap/heap-inl.h"
#include "src/heap/mark-compact-inl.h"
#include "src/heap/objects-visiting-inl.h"

namespace v8 {
namespace internal {

// We don't record weak slots during marking or scavenges. Instead we do it
// once when we complete the mark-compact cycle. Note that the write barrier
// has no effect if we are already in the middle of a compacting mark-compact
// cycle, so we have to record slots manually.
static bool MustRecordSlots(Heap* heap) {
  return heap->gc_state() == Heap::MARK_COMPACT &&
         heap->mark_compact_collector()->is_compacting();
}


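// WeakListVisitor<T> describes how elements of type T are linked into a weak
// list. Each specialization below provides the static hooks used by
// VisitWeakList and ClearWeakList:
//   SetWeakNext(T*, Object*)  - writes the element's weak-next link.
//   WeakNext(T*)              - reads the element's weak-next link.
//   WeakNextHolder(T*)        - the heap object that physically holds the link.
//   WeakNextOffset()          - byte offset of the link inside that holder.
//   VisitLiveObject(...)      - invoked for elements kept in the list.
//   VisitPhantomObject(...)   - invoked for elements dropped from the list.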
template <class T>
struct WeakListVisitor;


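// Walks the weak list starting at |list|, asking |retainer| which elements to
// keep. Dead elements are unlinked (and reported via VisitPhantomObject),
// surviving elements are re-chained and visited, and the new head of the list
// is returned.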
template <class T>
Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
  Object* undefined = ReadOnlyRoots(heap).undefined_value();
  Object* head = undefined;
  T* tail = nullptr;
  bool record_slots = MustRecordSlots(heap);

  while (list != undefined) {
    // Check whether to keep the candidate in the list.
    T* candidate = reinterpret_cast<T*>(list);

    Object* retained = retainer->RetainAs(list);

    // Move to the next element before the WeakNext is cleared.
    list = WeakListVisitor<T>::WeakNext(candidate);

    if (retained != nullptr) {
      if (head == undefined) {
        // First element in the list.
        head = retained;
      } else {
        // Subsequent elements in the list.
        DCHECK_NOT_NULL(tail);
        WeakListVisitor<T>::SetWeakNext(tail, retained);
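        // The weak write barrier used by SetWeakNext does not record slots
        // during a compacting full GC (see MustRecordSlots above), so record
        // the updated weak-next slot manually for the compactor.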
        if (record_slots) {
          HeapObject* slot_holder = WeakListVisitor<T>::WeakNextHolder(tail);
          int slot_offset = WeakListVisitor<T>::WeakNextOffset();
          Object** slot = HeapObject::RawField(slot_holder, slot_offset);
          MarkCompactCollector::RecordSlot(slot_holder, slot,
                                           HeapObject::cast(retained));
        }
      }
      // Retained object is new tail.
      DCHECK(!retained->IsUndefined(heap->isolate()));
      candidate = reinterpret_cast<T*>(retained);
      tail = candidate;

      // tail is a live object; visit it.
      WeakListVisitor<T>::VisitLiveObject(heap, tail, retainer);

    } else {
      WeakListVisitor<T>::VisitPhantomObject(heap, candidate);
    }
  }

  // Terminate the list if there is at least one element.
  if (tail != nullptr) WeakListVisitor<T>::SetWeakNext(tail, undefined);
  return head;
}


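// Resets the weak-next link of every element reachable from |list| to
// undefined, severing the chain without visiting the elements themselves.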
template <class T>
static void ClearWeakList(Heap* heap, Object* list) {
  Object* undefined = ReadOnlyRoots(heap).undefined_value();
  while (list != undefined) {
    T* candidate = reinterpret_cast<T*>(list);
    list = WeakListVisitor<T>::WeakNext(candidate);
    WeakListVisitor<T>::SetWeakNext(candidate, undefined);
  }
}

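// Code objects are chained through the next_code_link field of their
// CodeDataContainer.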
template <>
struct WeakListVisitor<Code> {
  static void SetWeakNext(Code* code, Object* next) {
    code->code_data_container()->set_next_code_link(next,
                                                    UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object* WeakNext(Code* code) {
    return code->code_data_container()->next_code_link();
  }

  static HeapObject* WeakNextHolder(Code* code) {
    return code->code_data_container();
  }

  static int WeakNextOffset() { return CodeDataContainer::kNextCodeLinkOffset; }

  static void VisitLiveObject(Heap*, Code*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap* heap, Code* code) {
    // Even though the code is dying, its code_data_container can still be
    // alive. Clear the next_code_link slot to avoid a dangling pointer.
    SetWeakNext(code, ReadOnlyRoots(heap).undefined_value());
  }
};


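// Contexts are chained through their NEXT_CONTEXT_LINK slot. A live native
// context additionally owns weak lists of optimized and deoptimized Code
// objects, which are pruned whenever the context itself is visited during a
// full GC.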
template <>
struct WeakListVisitor<Context> {
  static void SetWeakNext(Context* context, Object* next) {
    context->set(Context::NEXT_CONTEXT_LINK, next, UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object* WeakNext(Context* context) {
    return context->next_context_link();
  }

  static HeapObject* WeakNextHolder(Context* context) { return context; }

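  // NEXT_CONTEXT_LINK is an element of the context (a FixedArray), so its
  // byte offset equals the size of a FixedArray holding the preceding slots.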
  static int WeakNextOffset() {
    return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK);
  }

  static void VisitLiveObject(Heap* heap, Context* context,
                              WeakObjectRetainer* retainer) {
    if (heap->gc_state() == Heap::MARK_COMPACT) {
      // Record the slots of the weak entries in the native context.
      for (int idx = Context::FIRST_WEAK_SLOT;
           idx < Context::NATIVE_CONTEXT_SLOTS; ++idx) {
        Object** slot = Context::cast(context)->RawFieldOfElementAt(idx);
        MarkCompactCollector::RecordSlot(context, slot,
                                         HeapObject::cast(*slot));
      }
      // Code objects are always allocated in Code space, so we do not have to
      // visit them during scavenges.
      DoWeakList<Code>(heap, context, retainer, Context::OPTIMIZED_CODE_LIST);
      DoWeakList<Code>(heap, context, retainer, Context::DEOPTIMIZED_CODE_LIST);
    }
  }

  template <class T>
  static void DoWeakList(Heap* heap, Context* context,
                         WeakObjectRetainer* retainer, int index) {
    // Visit the weak list, removing dead intermediate elements.
    Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer);

    // Update the list head.
    context->set(index, list_head, UPDATE_WRITE_BARRIER);

    if (MustRecordSlots(heap)) {
      // Record the updated slot if necessary.
      Object** head_slot =
          HeapObject::RawField(context, FixedArray::SizeFor(index));
      heap->mark_compact_collector()->RecordSlot(context, head_slot,
                                                 HeapObject::cast(list_head));
    }
  }

  static void VisitPhantomObject(Heap* heap, Context* context) {
    ClearWeakList<Code>(heap, context->get(Context::OPTIMIZED_CODE_LIST));
    ClearWeakList<Code>(heap, context->get(Context::DEOPTIMIZED_CODE_LIST));
  }
};


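// AllocationSites carry their weak-next link directly in their own weak_next
// field, so the site itself is the slot holder.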
template <>
struct WeakListVisitor<AllocationSite> {
  static void SetWeakNext(AllocationSite* obj, Object* next) {
    obj->set_weak_next(next, UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object* WeakNext(AllocationSite* obj) { return obj->weak_next(); }

  static HeapObject* WeakNextHolder(AllocationSite* obj) { return obj; }

  static int WeakNextOffset() { return AllocationSite::kWeakNextOffset; }

  static void VisitLiveObject(Heap*, AllocationSite*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, AllocationSite*) {}
};


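// Explicit instantiations of VisitWeakList for the weak list element types
// that are processed from outside this translation unit.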
template Object* VisitWeakList<Context>(Heap* heap, Object* list,
                                        WeakObjectRetainer* retainer);

template Object* VisitWeakList<AllocationSite>(Heap* heap, Object* list,
                                               WeakObjectRetainer* retainer);
}  // namespace internal
}  // namespace v8