// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_VISITING_H_
#define V8_OBJECTS_VISITING_H_

#include "src/allocation.h"

// This file provides base classes and auxiliary methods for defining
// static object visitors used during GC.
// Visiting a HeapObject body with a normal ObjectVisitor requires performing
// two switches on the object's instance type to determine object size and
// layout, plus one or more virtual method calls on the visitor itself.
// A static visitor is different: it provides a dispatch table that contains
// pointers to specialized visit functions. Each map has a visitor_id field
// which contains the index of the specialized visitor to use.
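//
// A rough sketch of the dispatch path the classes below implement (for
// illustration only, not part of this header): the map's visitor_id indexes
// a table of non-virtual callbacks.
//
//   Callback visit = table_.GetVisitor(map);  // indexed by map->visitor_id()
//   visit(map, object);                       // specialized, inlinable call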

namespace v8 {
namespace internal {


// Base class for all static visitors.
class StaticVisitorBase : public AllStatic {
 public:
#define VISITOR_ID_LIST(V) \
  V(SeqOneByteString)      \
  V(SeqTwoByteString)      \
  V(ShortcutCandidate)     \
  V(ByteArray)             \
  V(FreeSpace)             \
  V(FixedArray)            \
  V(FixedDoubleArray)      \
  V(FixedTypedArray)       \
  V(FixedFloat64Array)     \
  V(ConstantPoolArray)     \
  V(NativeContext)         \
  V(AllocationSite)        \
  V(DataObject2)           \
  V(DataObject3)           \
  V(DataObject4)           \
  V(DataObject5)           \
  V(DataObject6)           \
  V(DataObject7)           \
  V(DataObject8)           \
  V(DataObject9)           \
  V(DataObjectGeneric)     \
  V(JSObject2)             \
  V(JSObject3)             \
  V(JSObject4)             \
  V(JSObject5)             \
  V(JSObject6)             \
  V(JSObject7)             \
  V(JSObject8)             \
  V(JSObject9)             \
  V(JSObjectGeneric)       \
  V(Struct2)               \
  V(Struct3)               \
  V(Struct4)               \
  V(Struct5)               \
  V(Struct6)               \
  V(Struct7)               \
  V(Struct8)               \
  V(Struct9)               \
  V(StructGeneric)         \
  V(ConsString)            \
  V(SlicedString)          \
  V(Symbol)                \
  V(Oddball)               \
  V(Code)                  \
  V(Map)                   \
  V(Cell)                  \
  V(PropertyCell)          \
  V(SharedFunctionInfo)    \
  V(JSFunction)            \
  V(JSWeakCollection)      \
  V(JSArrayBuffer)         \
  V(JSTypedArray)          \
  V(JSDataView)            \
  V(JSRegExp)

  // For data objects, JS objects and structs, we provide visitors specialized
  // by object size in words, along with a generic visitor that can visit an
  // object of any size.
  // The ids of the specialized visitors are declared in a linear order
  // (without holes), starting from the id of the visitor specialized for
  // 2-word objects (the base visitor id) and ending with the id of the
  // generic visitor.
  // GetVisitorIdForSize depends on this ordering to calculate the id of the
  // specialized visitor from a given instance size, base visitor id and
  // generic visitor id.
  enum VisitorId {
#define VISITOR_ID_ENUM_DECL(id) kVisit##id,
    VISITOR_ID_LIST(VISITOR_ID_ENUM_DECL)
#undef VISITOR_ID_ENUM_DECL
    kVisitorIdCount,
    kVisitDataObject = kVisitDataObject2,
    kVisitJSObject = kVisitJSObject2,
    kVisitStruct = kVisitStruct2,
    kMinObjectSizeInWords = 2
  };

  // Visitor ID should fit in one byte.
  STATIC_ASSERT(kVisitorIdCount <= 256);

  // Determine which specialized visitor should be used for a given instance
  // type and instance size.
  static VisitorId GetVisitorId(int instance_type, int instance_size);

  static VisitorId GetVisitorId(Map* map) {
    return GetVisitorId(map->instance_type(), map->instance_size());
  }

  // For visitors that allow specialization by size, calculate the VisitorId
  // based on the instance size, the base visitor id and the generic visitor
  // id.
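  // For example, with kMinObjectSizeInWords == 2 (see the enum above), a
  // 4-word JS object gives GetVisitorIdForSize(kVisitJSObject,
  // kVisitJSObjectGeneric, 4 * kPointerSize) == kVisitJSObject + (4 - 2),
  // i.e. kVisitJSObject4; sizes past the largest specialization clamp to the
  // generic id via Min().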
  static VisitorId GetVisitorIdForSize(VisitorId base, VisitorId generic,
                                       int object_size) {
    DCHECK((base == kVisitDataObject) || (base == kVisitStruct) ||
           (base == kVisitJSObject));
    DCHECK(IsAligned(object_size, kPointerSize));
    DCHECK(kMinObjectSizeInWords * kPointerSize <= object_size);
    DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);

    const VisitorId specialization = static_cast<VisitorId>(
        base + (object_size >> kPointerSizeLog2) - kMinObjectSizeInWords);

    return Min(specialization, generic);
  }
};


template <typename Callback>
class VisitorDispatchTable {
 public:
  void CopyFrom(VisitorDispatchTable* other) {
    // We are not using memcpy here, to guarantee that during the update every
    // element of the callbacks_ array remains a correct pointer (memcpy might
    // be implemented as a byte-copying loop).
    for (int i = 0; i < StaticVisitorBase::kVisitorIdCount; i++) {
      base::NoBarrier_Store(&callbacks_[i], other->callbacks_[i]);
    }
  }

  inline Callback GetVisitorById(StaticVisitorBase::VisitorId id) {
    return reinterpret_cast<Callback>(callbacks_[id]);
  }

  inline Callback GetVisitor(Map* map) {
    return reinterpret_cast<Callback>(callbacks_[map->visitor_id()]);
  }

  void Register(StaticVisitorBase::VisitorId id, Callback callback) {
    DCHECK(id < StaticVisitorBase::kVisitorIdCount);  // id is unsigned.
    callbacks_[id] = reinterpret_cast<base::AtomicWord>(callback);
  }

  template <typename Visitor, StaticVisitorBase::VisitorId base,
            StaticVisitorBase::VisitorId generic, int object_size_in_words>
  void RegisterSpecialization() {
    static const int size = object_size_in_words * kPointerSize;
    Register(StaticVisitorBase::GetVisitorIdForSize(base, generic, size),
             &Visitor::template VisitSpecialized<size>);
  }


  template <typename Visitor, StaticVisitorBase::VisitorId base,
            StaticVisitorBase::VisitorId generic>
  void RegisterSpecializations() {
    STATIC_ASSERT((generic - base + StaticVisitorBase::kMinObjectSizeInWords) ==
                  10);
    RegisterSpecialization<Visitor, base, generic, 2>();
    RegisterSpecialization<Visitor, base, generic, 3>();
    RegisterSpecialization<Visitor, base, generic, 4>();
    RegisterSpecialization<Visitor, base, generic, 5>();
    RegisterSpecialization<Visitor, base, generic, 6>();
    RegisterSpecialization<Visitor, base, generic, 7>();
    RegisterSpecialization<Visitor, base, generic, 8>();
    RegisterSpecialization<Visitor, base, generic, 9>();
    Register(generic, &Visitor::Visit);
  }
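
  // For illustration only (a rough sketch; the actual registrations live in
  // the implementation file): a static visitor's Initialize() typically fills
  // its table by registering a whole size-specialized family at once and the
  // remaining ids individually, roughly
  //
  //   table_.RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
  //                                  kVisitJSObjectGeneric>();
  //   table_.Register(kVisitByteArray, &VisitByteArray);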

 private:
  base::AtomicWord callbacks_[StaticVisitorBase::kVisitorIdCount];
};


template <typename StaticVisitor>
class BodyVisitorBase : public AllStatic {
 public:
  INLINE(static void IteratePointers(Heap* heap, HeapObject* object,
                                     int start_offset, int end_offset)) {
    Object** start_slot =
        reinterpret_cast<Object**>(object->address() + start_offset);
    Object** end_slot =
        reinterpret_cast<Object**>(object->address() + end_offset);
    StaticVisitor::VisitPointers(heap, start_slot, end_slot);
  }
};


template <typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
class FlexibleBodyVisitor : public BodyVisitorBase<StaticVisitor> {
 public:
  INLINE(static ReturnType Visit(Map* map, HeapObject* object)) {
    int object_size = BodyDescriptor::SizeOf(map, object);
    BodyVisitorBase<StaticVisitor>::IteratePointers(
        map->GetHeap(), object, BodyDescriptor::kStartOffset, object_size);
    return static_cast<ReturnType>(object_size);
  }

  template <int object_size>
  static inline ReturnType VisitSpecialized(Map* map, HeapObject* object) {
    DCHECK(BodyDescriptor::SizeOf(map, object) == object_size);
    BodyVisitorBase<StaticVisitor>::IteratePointers(
        map->GetHeap(), object, BodyDescriptor::kStartOffset, object_size);
    return static_cast<ReturnType>(object_size);
  }
};


template <typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
class FixedBodyVisitor : public BodyVisitorBase<StaticVisitor> {
 public:
  INLINE(static ReturnType Visit(Map* map, HeapObject* object)) {
    BodyVisitorBase<StaticVisitor>::IteratePointers(
        map->GetHeap(), object, BodyDescriptor::kStartOffset,
        BodyDescriptor::kEndOffset);
    return static_cast<ReturnType>(BodyDescriptor::kSize);
  }
};
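
// For illustration only (a hypothetical typedef, not declared here):
// FixedBodyVisitor is instantiated for objects whose pointer fields sit at
// fixed offsets, e.g.
//
//   typedef FixedBodyVisitor<SomeVisitor, SomeType::BodyDescriptor, void>
//       SomeTypeVisitor;
//
// whereas FlexibleBodyVisitor is used when the size comes from
// BodyDescriptor::SizeOf (see the StructVisitor and JSObjectVisitor typedefs
// below).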


// Base class for visitors used for a linear new space iteration.
// IterateBody returns the size of the visited object.
// Certain types of objects (e.g. Code objects) are not handled
// by the dispatch table of this visitor because they cannot appear
// in the new space.
//
// This class is intended to be used in the following way:
//
//   class SomeVisitor : public StaticNewSpaceVisitor<SomeVisitor> {
//     ...
//   }
//
// This is an example of the Curiously Recurring Template Pattern
// (see http://en.wikipedia.org/wiki/Curiously_recurring_template_pattern).
// We use CRTP to guarantee aggressive compile-time optimizations (e.g.
// inlining and specialization of StaticVisitor::VisitPointers methods).
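//
// A slightly fuller sketch (hypothetical, for illustration only): the derived
// class supplies the static VisitPointer hook that VisitPointers below calls,
// e.g.
//
//   class SomeVisitor : public StaticNewSpaceVisitor<SomeVisitor> {
//    public:
//     INLINE(static void VisitPointer(Heap* heap, Object** p)) {
//       // Inspect or update the slot *p here.
//     }
//   };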
template <typename StaticVisitor>
class StaticNewSpaceVisitor : public StaticVisitorBase {
 public:
  static void Initialize();

  INLINE(static int IterateBody(Map* map, HeapObject* obj)) {
    return table_.GetVisitor(map)(map, obj);
  }

  INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) {
    for (Object** p = start; p < end; p++) StaticVisitor::VisitPointer(heap, p);
  }

 private:
  INLINE(static int VisitJSFunction(Map* map, HeapObject* object)) {
    Heap* heap = map->GetHeap();
    VisitPointers(heap,
                  HeapObject::RawField(object, JSFunction::kPropertiesOffset),
                  HeapObject::RawField(object, JSFunction::kCodeEntryOffset));

    // Don't visit code entry. We are using this visitor only during scavenges.

    VisitPointers(
        heap, HeapObject::RawField(object,
                                   JSFunction::kCodeEntryOffset + kPointerSize),
        HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset));
    return JSFunction::kSize;
  }

  INLINE(static int VisitByteArray(Map* map, HeapObject* object)) {
    return reinterpret_cast<ByteArray*>(object)->ByteArraySize();
  }

  INLINE(static int VisitFixedDoubleArray(Map* map, HeapObject* object)) {
    int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
    return FixedDoubleArray::SizeFor(length);
  }

  INLINE(static int VisitFixedTypedArray(Map* map, HeapObject* object)) {
    return reinterpret_cast<FixedTypedArrayBase*>(object)->size();
  }

  INLINE(static int VisitJSObject(Map* map, HeapObject* object)) {
    return JSObjectVisitor::Visit(map, object);
  }

  INLINE(static int VisitSeqOneByteString(Map* map, HeapObject* object)) {
    return SeqOneByteString::cast(object)
        ->SeqOneByteStringSize(map->instance_type());
  }

  INLINE(static int VisitSeqTwoByteString(Map* map, HeapObject* object)) {
    return SeqTwoByteString::cast(object)
        ->SeqTwoByteStringSize(map->instance_type());
  }

  INLINE(static int VisitFreeSpace(Map* map, HeapObject* object)) {
    return FreeSpace::cast(object)->Size();
  }

  INLINE(static int VisitJSArrayBuffer(Map* map, HeapObject* object));
  INLINE(static int VisitJSTypedArray(Map* map, HeapObject* object));
  INLINE(static int VisitJSDataView(Map* map, HeapObject* object));

  class DataObjectVisitor {
   public:
    template <int object_size>
    static inline int VisitSpecialized(Map* map, HeapObject* object) {
      return object_size;
    }

    INLINE(static int Visit(Map* map, HeapObject* object)) {
      return map->instance_size();
    }
  };

  typedef FlexibleBodyVisitor<StaticVisitor, StructBodyDescriptor, int>
      StructVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor, JSObject::BodyDescriptor, int>
      JSObjectVisitor;

  typedef int (*Callback)(Map* map, HeapObject* object);

  static VisitorDispatchTable<Callback> table_;
};


template <typename StaticVisitor>
VisitorDispatchTable<typename StaticNewSpaceVisitor<StaticVisitor>::Callback>
    StaticNewSpaceVisitor<StaticVisitor>::table_;
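

// For illustration only (a hypothetical sketch, not part of this header): a
// linear new-space iteration advances from one object to the next by the size
// that IterateBody returns, roughly
//
//   while (current < top) {
//     HeapObject* object = HeapObject::FromAddress(current);
//     current += SomeVisitor::IterateBody(object->map(), object);
//   }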


// Base class for visitors used to transitively mark the entire heap.
// IterateBody returns nothing.
// Certain types of objects might not be handled by this base class and
// no visitor function is registered by the generic initialization. A
// specialized visitor function needs to be provided by the inheriting
// class itself for those cases.
//
// This class is intended to be used in the following way:
//
//   class SomeVisitor : public StaticMarkingVisitor<SomeVisitor> {
//     ...
//   }
//
// This is an example of the Curiously Recurring Template Pattern.
template <typename StaticVisitor>
class StaticMarkingVisitor : public StaticVisitorBase {
 public:
  static void Initialize();

  INLINE(static void IterateBody(Map* map, HeapObject* obj)) {
    table_.GetVisitor(map)(map, obj);
  }

  INLINE(static void VisitPropertyCell(Map* map, HeapObject* object));
  INLINE(static void VisitCodeEntry(Heap* heap, Address entry_address));
  INLINE(static void VisitEmbeddedPointer(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCell(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitDebugTarget(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCodeTarget(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCodeAgeSequence(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitExternalReference(RelocInfo* rinfo)) {}
  INLINE(static void VisitRuntimeEntry(RelocInfo* rinfo)) {}
  // Skip the weak next code link in a code object.
  INLINE(static void VisitNextCodeLink(Heap* heap, Object** slot)) {}

  // TODO(mstarzinger): This should be made protected once refactoring is done.
  // Mark non-optimized code for functions inlined into the given optimized
  // code. This will prevent it from being flushed.
  static void MarkInlinedFunctionsCode(Heap* heap, Code* code);

 protected:
  INLINE(static void VisitMap(Map* map, HeapObject* object));
  INLINE(static void VisitCode(Map* map, HeapObject* object));
  INLINE(static void VisitSharedFunctionInfo(Map* map, HeapObject* object));
  INLINE(static void VisitConstantPoolArray(Map* map, HeapObject* object));
  INLINE(static void VisitAllocationSite(Map* map, HeapObject* object));
  INLINE(static void VisitWeakCollection(Map* map, HeapObject* object));
  INLINE(static void VisitJSFunction(Map* map, HeapObject* object));
  INLINE(static void VisitJSRegExp(Map* map, HeapObject* object));
  INLINE(static void VisitJSArrayBuffer(Map* map, HeapObject* object));
  INLINE(static void VisitJSTypedArray(Map* map, HeapObject* object));
  INLINE(static void VisitJSDataView(Map* map, HeapObject* object));
  INLINE(static void VisitNativeContext(Map* map, HeapObject* object));

  // Mark pointers in a Map and its TransitionArray together, possibly
  // treating transitions or back pointers as weak.
  static void MarkMapContents(Heap* heap, Map* map);
  static void MarkTransitionArray(Heap* heap, TransitionArray* transitions);

  // Code flushing support.
  INLINE(static bool IsFlushable(Heap* heap, JSFunction* function));
  INLINE(static bool IsFlushable(Heap* heap, SharedFunctionInfo* shared_info));

  // Helpers used by code flushing support that visit pointer fields and treat
  // references to code objects either strongly or weakly.
  static void VisitSharedFunctionInfoStrongCode(Heap* heap, HeapObject* object);
  static void VisitSharedFunctionInfoWeakCode(Heap* heap, HeapObject* object);
  static void VisitJSFunctionStrongCode(Heap* heap, HeapObject* object);
  static void VisitJSFunctionWeakCode(Heap* heap, HeapObject* object);

  class DataObjectVisitor {
   public:
    template <int size>
    static inline void VisitSpecialized(Map* map, HeapObject* object) {}

    INLINE(static void Visit(Map* map, HeapObject* object)) {}
  };

  typedef FlexibleBodyVisitor<StaticVisitor, FixedArray::BodyDescriptor, void>
      FixedArrayVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor, JSObject::BodyDescriptor, void>
      JSObjectVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor, StructBodyDescriptor, void>
      StructObjectVisitor;

  typedef void (*Callback)(Map* map, HeapObject* object);

  static VisitorDispatchTable<Callback> table_;
};


template <typename StaticVisitor>
VisitorDispatchTable<typename StaticMarkingVisitor<StaticVisitor>::Callback>
    StaticMarkingVisitor<StaticVisitor>::table_;


class WeakObjectRetainer;


// A weak list is a singly linked list where each element has a weak pointer
// to the next element. Given the head of the list, this function removes dead
// elements from the list and, if requested, records slots for the next-element
// pointers. The template parameter T is a WeakListVisitor that defines how to
// access the next-element pointers.
template <class T>
Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer);
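
// For illustration only (an assumption about the intended contract, not
// declared here): a WeakListVisitor<T> specialization provides static helpers
// for reading and writing T's weak next-element field, roughly
//
//   static Object* WeakNext(T* element);
//   static void SetWeakNext(T* element, Object* next);
//
// plus hooks invoked for the live and dead elements encountered during the
// walk.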
}
}  // namespace v8::internal

#endif  // V8_OBJECTS_VISITING_H_