// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_VISITING_INL_H_
#define V8_OBJECTS_VISITING_INL_H_

#include "src/heap/array-buffer-tracker.h"
#include "src/heap/objects-visiting.h"
#include "src/ic/ic-state.h"
#include "src/macro-assembler.h"
#include "src/objects-body-descriptors-inl.h"

namespace v8 {
namespace internal {


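// Looks up the visitor callback registered for |map|'s visitor id.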
template <typename Callback>
Callback VisitorDispatchTable<Callback>::GetVisitor(Map* map) {
  return reinterpret_cast<Callback>(callbacks_[map->visitor_id()]);
}


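// Fills the dispatch table with the visitors used while scavenging new
// space. The body visitors are instantiated with an int return type: each
// callback returns the size of the visited object.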
template <typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
  table_.Register(
      kVisitShortcutCandidate,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(
      kVisitConsString,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    int>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                                       FixedArray::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);
  table_.Register(
      kVisitFixedTypedArray,
      &FlexibleBodyVisitor<StaticVisitor, FixedTypedArrayBase::BodyDescriptor,
                           int>::Visit);

  table_.Register(
      kVisitFixedFloat64Array,
      &FlexibleBodyVisitor<StaticVisitor, FixedTypedArrayBase::BodyDescriptor,
                           int>::Visit);

  table_.Register(
      kVisitNativeContext,
      &FixedBodyVisitor<StaticVisitor, Context::ScavengeBodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitByteArray, &VisitByteArray);
  table_.Register(kVisitBytecodeArray, &VisitBytecodeArray);

  table_.Register(
      kVisitSharedFunctionInfo,
      &FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);

  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);

  // Don't visit code entry. We are using this visitor only during scavenges.
  table_.Register(
      kVisitJSFunction,
      &FlexibleBodyVisitor<StaticVisitor, JSFunction::BodyDescriptorWeakCode,
                           int>::Visit);

  table_.Register(
      kVisitJSArrayBuffer,
      &FlexibleBodyVisitor<StaticVisitor, JSArrayBuffer::BodyDescriptor,
                           int>::Visit);

  table_.Register(kVisitFreeSpace, &VisitFreeSpace);

  table_.Register(
      kVisitJSWeakCollection,
      &FlexibleBodyVisitor<StaticVisitor, JSWeakCollection::BodyDescriptor,
                           int>::Visit);

  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  // Not using the specialized API object visitor for new space.
  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSApiObject,
                                          kVisitJSApiObjectGeneric>();

  table_.template RegisterSpecializations<StructVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}

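// Visits the pointer fields of a BytecodeArray (those between
// kConstantPoolOffset and kFrameSizeOffset) and returns the array's size.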
template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitBytecodeArray(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(), object,
      HeapObject::RawField(object, BytecodeArray::kConstantPoolOffset),
      HeapObject::RawField(object, BytecodeArray::kFrameSizeOffset));
  return reinterpret_cast<BytecodeArray*>(object)->BytecodeArraySize();
}


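// Fills the dispatch table with the visitors used during full-GC marking.
// Unlike the new-space table above, these callbacks return void, and objects
// such as Code, Map, SharedFunctionInfo, JSFunction, WeakCell and
// TransitionArray get the custom visitors defined below.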
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, void>::Visit);

  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);

  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);

  table_.Register(
      kVisitFixedTypedArray,
      &FlexibleBodyVisitor<StaticVisitor, FixedTypedArrayBase::BodyDescriptor,
                           void>::Visit);

  table_.Register(
      kVisitFixedFloat64Array,
      &FlexibleBodyVisitor<StaticVisitor, FixedTypedArrayBase::BodyDescriptor,
                           void>::Visit);

  table_.Register(kVisitNativeContext, &VisitNativeContext);

  table_.Register(
      kVisitAllocationSite,
      &FixedBodyVisitor<StaticVisitor, AllocationSite::MarkingBodyDescriptor,
                        void>::Visit);

  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);

  table_.Register(
      kVisitBytecodeArray,
      &FixedBodyVisitor<StaticVisitor, BytecodeArray::MarkingBodyDescriptor,
                        void>::Visit);

  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitJSWeakCollection, &VisitWeakCollection);

  table_.Register(
      kVisitOddball,
      &FixedBodyVisitor<StaticVisitor, Oddball::BodyDescriptor, void>::Visit);

  table_.Register(kVisitMap, &VisitMap);

  table_.Register(kVisitCode, &VisitCode);

  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(
      kVisitJSArrayBuffer,
      &FlexibleBodyVisitor<StaticVisitor, JSArrayBuffer::BodyDescriptor,
                           void>::Visit);

  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.Register(
      kVisitCell,
      &FixedBodyVisitor<StaticVisitor, Cell::BodyDescriptor, void>::Visit);

  table_.Register(kVisitPropertyCell,
                  &FixedBodyVisitor<StaticVisitor, PropertyCell::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitWeakCell, &VisitWeakCell);

  table_.Register(kVisitTransitionArray, &VisitTransitionArray);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<JSApiObjectVisitor, kVisitJSApiObject,
                                          kVisitJSApiObjectGeneric>();

  table_.template RegisterSpecializations<StructObjectVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}


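// Records the code entry slot of |object| and marks the referenced Code
// object.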
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
    Heap* heap, HeapObject* object, Address entry_address) {
  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
  heap->mark_compact_collector()->RecordCodeEntrySlot(object, entry_address,
                                                      code);
  StaticVisitor::MarkObject(heap, code);
}


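// Records the relocation slot of an object embedded in code, and marks the
// object unless the host code holds it weakly.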
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  Code* host = rinfo->host();
  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, object);
  // TODO(ulan): It could be better to record slots only for strongly embedded
  // objects here and record slots for weakly embedded objects during clearing
  // of non-live references in mark-compact.
  if (!host->IsWeakObject(object)) {
    StaticVisitor::MarkObject(heap, object);
  }
}


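// Like VisitEmbeddedPointer, but for a Cell referenced from code.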
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCell(Heap* heap,
                                                    RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::CELL);
  Cell* cell = rinfo->target_cell();
  Code* host = rinfo->host();
  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, cell);
  if (!host->IsWeakObject(cell)) {
    StaticVisitor::MarkObject(heap, cell);
  }
}


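// Records and marks the code target of a patched debug break slot.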
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(Heap* heap,
                                                           RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
         rinfo->IsPatchedDebugBreakSlotSequence());
  Code* target = Code::GetCodeFromTargetAddress(rinfo->debug_call_address());
  Code* host = rinfo->host();
  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


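// Records and marks the target of a code-target relocation. Stale inline
// cache stubs may be cleared first, as explained in the comment below.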
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(Heap* heap,
                                                          RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  // Monomorphic ICs are preserved when possible, but need to be flushed
  // when they might be keeping a Context alive, or when the heap is about
  // to be serialized.
  if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub() &&
      (heap->isolate()->serializer_enabled() ||
       target->ic_age() != heap->global_ic_age())) {
    ICUtility::Clear(heap->isolate(), rinfo->pc(),
                     rinfo->host()->constant_pool());
    target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  }
  Code* host = rinfo->host();
  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


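// Records and marks the code age stub referenced by a code age sequence.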
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
  Code* target = rinfo->code_age_stub();
  DCHECK(target != NULL);
  Code* host = rinfo->host();
  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


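// Native contexts are marked with the mark-compact body descriptor rather
// than the scavenge descriptor used by the new-space visitor above.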
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor, Context::MarkCompactBodyDescriptor,
                   void>::Visit(map, object);
}


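// Visits a Map. Maps that can transition are marked via MarkMapContents so
// that their transitions and back pointers stay weak; all other maps simply
// have their pointer fields visited.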
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(Map* map,
                                                   HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clears the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled we have to mark through the map's
  // transitions and back pointers in a special way to make these links weak.
  if (map_object->CanTransition()) {
    MarkMapContents(heap, map_object);
  } else {
    StaticVisitor::VisitPointers(
        heap, object,
        HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}

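// Visits a WeakCell. A cell whose value is already marked has its value slot
// recorded right away; otherwise the cell is chained onto the heap's list of
// encountered weak cells for processing after marking completes.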
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCell(Map* map,
                                                        HeapObject* object) {
  Heap* heap = map->GetHeap();
  WeakCell* weak_cell = reinterpret_cast<WeakCell*>(object);
  // Enqueue the weak cell in the linked list of encountered weak cells.
  // We can ignore weak cells with cleared values because they will always
  // contain smi zero.
  if (weak_cell->next_cleared() && !weak_cell->cleared()) {
    HeapObject* value = HeapObject::cast(weak_cell->value());
    if (MarkCompactCollector::IsMarked(value)) {
      // Weak cells with live values are directly processed here to reduce
      // the processing time of weak cells during the main GC pause.
      Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
      heap->mark_compact_collector()->RecordSlot(weak_cell, slot, *slot);
    } else {
      // If we do not know about liveness of values of weak cells, we have to
      // process them when we know the liveness of the whole transitive
      // closure.
      weak_cell->set_next(heap->encountered_weak_cells(),
                          UPDATE_WEAK_WRITE_BARRIER);
      heap->set_encountered_weak_cells(weak_cell);
    }
  }
}


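// Visits a TransitionArray. Prototype transitions and transition keys are
// visited strongly; the array itself is then enqueued so that its weak
// references can be processed later.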
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitTransitionArray(
    Map* map, HeapObject* object) {
  TransitionArray* array = TransitionArray::cast(object);
  Heap* heap = array->GetHeap();
  // Visit strong references.
  if (array->HasPrototypeTransitions()) {
    StaticVisitor::VisitPointer(heap, array,
                                array->GetPrototypeTransitionsSlot());
  }
  int num_transitions = TransitionArray::NumberOfTransitions(array);
  for (int i = 0; i < num_transitions; ++i) {
    StaticVisitor::VisitPointer(heap, array, array->GetKeySlot(i));
  }
  // Enqueue the array in the linked list of encountered transition arrays
  // if it is not already in the list.
  if (array->next_link()->IsUndefined(heap->isolate())) {
    array->set_next_link(heap->encountered_transition_arrays(),
                         UPDATE_WEAK_WRITE_BARRIER);
    heap->set_encountered_transition_arrays(array);
  }
}

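// Visits a JSWeakCollection: enqueues it for post-processing and marks its
// backing hash table without pushing the table on the marking stack.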
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCollection(
    Map* map, HeapObject* object) {
  typedef FlexibleBodyVisitor<StaticVisitor,
                              JSWeakCollection::BodyDescriptorWeak,
                              void> JSWeakCollectionBodyVisitor;
  Heap* heap = map->GetHeap();
  JSWeakCollection* weak_collection =
      reinterpret_cast<JSWeakCollection*>(object);

  // Enqueue the weak collection in the linked list of encountered weak
  // collections.
  if (weak_collection->next() == heap->undefined_value()) {
    weak_collection->set_next(heap->encountered_weak_collections());
    heap->set_encountered_weak_collections(weak_collection);
  }

  // Skip visiting the backing hash table containing the mappings and the
  // pointer to the other enqueued weak collections; both are post-processed.
  JSWeakCollectionBodyVisitor::Visit(map, object);

  // A partially initialized weak collection is enqueued, but its table is
  // ignored.
  if (!weak_collection->table()->IsHashTable()) return;

  // Mark the backing hash table without pushing it on the marking stack.
  Object** slot = HeapObject::RawField(object, JSWeakCollection::kTableOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(object, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);
}


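// Visits a Code object, first aging it when code aging is enabled and the
// heap is not about to be serialized.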
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(Map* map,
                                                    HeapObject* object) {
  typedef FlexibleBodyVisitor<StaticVisitor, Code::BodyDescriptor, void>
      CodeBodyVisitor;
  Heap* heap = map->GetHeap();
  Code* code = Code::cast(object);
  if (FLAG_age_code && !heap->isolate()->serializer_enabled()) {
    code->MakeOlder(heap->mark_compact_collector()->marking_parity());
  }
  CodeBodyVisitor::Visit(map, object);
}


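// Visits a SharedFunctionInfo. When code flushing is enabled and the code
// looks flushable, the candidate is handed to the code flusher and the code
// field is treated weakly; otherwise the code is visited strongly.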
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  if (FLAG_flush_optimized_code_cache) {
    if (!shared->OptimizedCodeMapIsCleared()) {
      // Always flush the optimized code map if requested by flag.
      shared->ClearOptimizedCodeMap();
    }
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, shared)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(shared);
      // Treat the reference to the code object weakly.
      VisitSharedFunctionInfoWeakCode(map, object);
      return;
    }
  }
  VisitSharedFunctionInfoStrongCode(map, object);
}


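// Visits a JSFunction, mirroring VisitSharedFunctionInfo above. Functions
// that are not flushable additionally keep their unoptimized code alive.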
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(Map* map,
                                                          HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  if (FLAG_cleanup_code_caches_at_gc) {
    function->ClearTypeFeedbackInfoAtGCTime();
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(function);
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(map, object);
      return;
    } else {
      // Visit all unoptimized code objects to prevent flushing them.
      StaticVisitor::MarkObject(heap, function->shared()->code());
    }
  }
  VisitJSFunctionStrongCode(map, object);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap,
                                                          Map* map) {
  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a
  // non-empty descriptor array is marked, its header is also visited. The
  // slot holding the descriptor array will be implicitly recorded when the
  // pointer fields of this map are visited. Prototype maps don't keep track
  // of transitions, so just mark the entire descriptor array.
  if (!map->is_prototype_map()) {
    DescriptorArray* descriptors = map->instance_descriptors();
    if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
        descriptors->length() > 0) {
      StaticVisitor::VisitPointers(heap, descriptors,
                                   descriptors->GetFirstElementAddress(),
                                   descriptors->GetDescriptorEndSlot(0));
    }
    int start = 0;
    int end = map->NumberOfOwnDescriptors();
    if (start < end) {
      StaticVisitor::VisitPointers(heap, descriptors,
                                   descriptors->GetDescriptorStartSlot(start),
                                   descriptors->GetDescriptorEndSlot(end));
    }
  }

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  StaticVisitor::VisitPointers(
      heap, map, HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}


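// Returns true if |info| is backed by a script whose source is available.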
inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
  Object* undefined = heap->undefined_value();
  return (info->script() != undefined) &&
         (reinterpret_cast<Script*>(info->script())->source() != undefined);
}


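// Returns true if the function's code can be flushed. Checks the
// function-specific conditions first, then defers to the SharedFunctionInfo
// overload below.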
template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(Heap* heap,
                                                      JSFunction* function) {
  SharedFunctionInfo* shared_info = function->shared();

  // Code is either on the stack, in the compilation cache, or referenced
  // by an optimized version of the function.
  MarkBit code_mark = ObjectMarking::MarkBitFrom(function->code());
  if (Marking::IsBlackOrGrey(code_mark)) {
    return false;
  }

  // We do not (yet) flush code for optimized functions.
  if (function->code() != shared_info->code()) {
    return false;
  }

  // Check the age of the code.
  if (FLAG_age_code && !function->code()->IsOld()) {
    return false;
  }

  return IsFlushable(heap, shared_info);
}


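// Returns true if the shared function info's code can be flushed. Each early
// return below documents one condition that keeps the code alive.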
template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // Code is either on the stack, in the compilation cache, or referenced
  // by an optimized version of the function.
  MarkBit code_mark = ObjectMarking::MarkBitFrom(shared_info->code());
  if (Marking::IsBlackOrGrey(code_mark)) {
    return false;
  }

  // The function must be compiled and have the source code available,
  // to be able to recompile it in case we need the function again.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }

  // We never flush code for API functions.
  if (shared_info->IsApiFunction()) {
    return false;
  }

  // Only flush code for functions.
  if (shared_info->code()->kind() != Code::FUNCTION) {
    return false;
  }

  // The function must allow lazy compilation.
  if (!shared_info->allows_lazy_compilation()) {
    return false;
  }

  // We do not (yet?) flush code for generator or async functions, because
  // we don't know whether there are still live activations (generator
  // objects) on the heap.
  if (IsResumableFunction(shared_info->kind())) {
    return false;
  }

  // If this is a full script wrapped in a function we do not flush the code.
  if (shared_info->is_toplevel()) {
    return false;
  }

  // The function must not be a builtin.
  if (shared_info->IsBuiltin()) {
    return false;
  }

  // Maintain debug break slots in the code.
  if (shared_info->HasDebugCode()) {
    return false;
  }

  // If this is a function initialized with %SetCode then the one-to-one
  // relation between SharedFunctionInfo and Code is broken.
  if (shared_info->dont_flush()) {
    return false;
  }

  // Check the age of the code. If code aging is disabled we never flush.
  if (!FLAG_age_code || !shared_info->code()->IsOld()) {
    return false;
  }

  return true;
}

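// The four visitors below differ only in their body descriptors: the
// "WeakCode" variants skip the code field so that it can be treated weakly
// by the code flusher.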
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptor,
                   void>::Visit(map, object);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
    Map* map, HeapObject* object) {
  // Skip visiting kCodeOffset as it is treated weakly here.
  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset <
                SharedFunctionInfo::BodyDescriptorWeakCode::kStartOffset);
  FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptorWeakCode,
                   void>::Visit(map, object);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
    Map* map, HeapObject* object) {
  typedef FlexibleBodyVisitor<StaticVisitor,
                              JSFunction::BodyDescriptorStrongCode,
                              void> JSFunctionStrongCodeBodyVisitor;
  JSFunctionStrongCodeBodyVisitor::Visit(map, object);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
    Map* map, HeapObject* object) {
  typedef FlexibleBodyVisitor<StaticVisitor, JSFunction::BodyDescriptorWeakCode,
                              void> JSFunctionWeakCodeBodyVisitor;
  JSFunctionWeakCodeBodyVisitor::Visit(map, object);
}


}  // namespace internal
}  // namespace v8

#endif  // V8_OBJECTS_VISITING_INL_H_