Searched refs:IsMarking (Results 1 – 11 of 11) sorted by relevance
36 V8_INLINE bool IsMarking() const { return GetFlags() & kMarkingBit; } in IsMarking() function
68 if (!value_chunk->IsMarking()) return; in MarkingBarrierInternal()
140 if (!object_chunk->IsMarking()) return; in MarkingBarrierForElements()
150 if (!object_chunk->IsMarking()) return; in MarkingBarrierForCode()

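This first result is the chunk-level fast path of the write barrier: each memory chunk keeps a marking bit in its flags, and the barrier returns early when the bit is clear. A minimal sketch of that shape, not V8's actual code; the names MemoryChunkSketch, kMarkingBit, and MarkValue are illustrative assumptions:

    // Sketch only: a write barrier that consults a per-chunk marking bit
    // and bails out early when the chunk is not being marked.
    #include <cstdint>
    #include <cstdio>

    struct MemoryChunkSketch {
      static constexpr uintptr_t kMarkingBit = uintptr_t{1} << 0;  // assumed bit position
      uintptr_t flags_ = 0;

      uintptr_t GetFlags() const { return flags_; }
      bool IsMarking() const { return (GetFlags() & kMarkingBit) != 0; }
    };

    // Stand-in for the slow path that would hand the value to the marker;
    // here it only reports that the barrier fired.
    void MarkValue(const void* host, const void* value) {
      std::printf("barrier: %p -> %p recorded for marking\n", host, value);
    }

    void MarkingBarrierSketch(MemoryChunkSketch* value_chunk, const void* host,
                              const void* value) {
      if (!value_chunk->IsMarking()) return;  // fast path: marking is off
      MarkValue(host, value);                 // slow path: tell the marker
    }

    int main() {
      MemoryChunkSketch chunk;
      int host = 0, value = 0;
      MarkingBarrierSketch(&chunk, &host, &value);  // bit clear: no output
      chunk.flags_ |= MemoryChunkSketch::kMarkingBit;
      MarkingBarrierSketch(&chunk, &host, &value);  // bit set: barrier fires
    }
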
21 if (IsMarking() && value->IsHeapObject()) { in RecordWrite()
33 if (IsMarking() && value->ToStrongOrWeakHeapObject(&heap_object)) { in RecordMaybeWeakWrite()

119 DCHECK(IsMarking()); in RecordWriteIntoCode()
150 DCHECK(IsMarking()); in NotifyLeftTrimming()
427 DCHECK(IsMarking()); in StartBlackAllocation()
440 DCHECK(IsMarking()); in PauseBlackAllocation()
470 DCHECK(IsMarking()); in MarkRoots()
542 DCHECK(IsMarking()); in FinalizeIncrementally()
574 if (!IsMarking()) return; in UpdateMarkingWorklistAfterScavenge()
723 if (!IsMarking()) return; in UpdateMarkedBytesAfterScavenge()
755 if (IsMarking() && marking_state()->IsBlack(obj)) { in ProcessBlackAllocatedObject()
761 DCHECK(IsMarking()); in RevisitObject()

141 inline bool IsMarking() const { return state() >= MARKING; } in IsMarking() function
152 return IsMarking() && in NeedsFinalization()
227 bool IsCompacting() { return IsMarking() && is_compacting_; } in IsCompacting()

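The second definition lives on IncrementalMarking itself: IsMarking() is an ordered comparison against a state machine, so every state at or beyond MARKING counts, and IsCompacting() additionally requires the compaction flag. A small sketch of that shape with an assumed state ordering; V8's actual enum members and transitions may differ:

    // Sketch only: an IncrementalMarking-like state machine where
    // IsMarking() is a >= comparison on the current state.
    class IncrementalMarkingSketch {
     public:
      enum State { STOPPED, SWEEPING, MARKING, COMPLETE };

      bool IsMarking() const { return state() >= MARKING; }
      bool IsCompacting() const { return IsMarking() && is_compacting_; }

      void Start(bool compacting) {
        state_ = MARKING;
        is_compacting_ = compacting;
      }
      void MarkingComplete() { state_ = COMPLETE; }  // still IsMarking()
      void Stop() {
        state_ = STOPPED;
        is_compacting_ = false;
      }

     private:
      State state() const { return state_; }
      State state_ = STOPPED;
      bool is_compacting_ = false;
    };
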
92 is_incremental_marking_(heap->incremental_marking()->IsMarking()), in Scavenger()
1074 incremental_marking()->IsMarking() && in HandleGCRequest()
1488 if (FLAG_concurrent_marking && incremental_marking()->IsMarking()) { in MoveElements()
1983 if (incremental_marking()->IsMarking()) in EvacuateYoungGeneration()
2926 if (incremental_marking()->IsMarking()) { in LeftTrimFixedArray()
3237 if (incremental_marking()->IsMarking() && in FinalizeIncrementalMarkingIfComplete()
3338 if (incremental_marking()->IsMarking()) { in NotifyObjectLayoutChange()
4896 if (FLAG_incremental_marking_wrappers && incremental_marking()->IsMarking()) { in RegisterExternallyReferencedObject()
5924 DCHECK(heap->incremental_marking()->IsMarking()); in MarkingBarrierForCodeSlow()
5938 !heap->incremental_marking()->IsMarking() || in PageFlagsAreConsistent()
5940 slim_chunk->IsMarking()); in PageFlagsAreConsistent()

716 page->SetOldGenerationPageFlags(heap()->incremental_marking()->IsMarking()); in InitializePage()
732 page->SetYoungGenerationPageFlags(heap()->incremental_marking()->IsMarking()); in InitializePage()
2782 if (page->heap()->incremental_marking()->IsMarking()) { in AllocateChunk()
3454 page->SetOldGenerationPageFlags(heap()->incremental_marking()->IsMarking()); in AllocateChunk()
3752 page->SetYoungGenerationPageFlags(heap()->incremental_marking()->IsMarking()); in AllocateChunk()

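These hits are where the per-chunk bit read by the barrier in the first result gets set: when a page is initialized or a chunk is allocated, its flags are derived from whether incremental marking is currently active. A hedged sketch of that flag update; the bit name and helper are assumptions, not V8's flag layout:

    // Sketch only: derive a page's marking flag from the collector state at
    // initialization, so the barrier's fast path stays a single flag test.
    #include <cstdint>

    constexpr uintptr_t kMarkingBitSketch = uintptr_t{1} << 0;  // assumed bit

    uintptr_t SetPageFlagsForMarking(uintptr_t flags, bool is_marking) {
      return is_marking ? (flags | kMarkingBitSketch)
                        : (flags & ~kMarkingBitSketch);
    }
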
410 if (heap()->incremental_marking()->IsMarking()) { in TearDown()
758 was_marked_incrementally_ = heap()->incremental_marking()->IsMarking(); in Prepare()
1285 is_incremental_marking_(heap->incremental_marking()->IsMarking()) {} in EvacuateNewSpaceVisitor()
3667 if (heap_->incremental_marking()->IsMarking()) { in Move()
4383 } else if (heap()->incremental_marking()->IsMarking()) { in RawEvacuatePage()
4403 } else if (heap()->incremental_marking()->IsMarking()) { in RawEvacuatePage()

218 Node* IsMarking() { in IsMarking() function in v8::internal::RecordWriteCodeStubAssembler
378 Branch(IsMarking(), &test_old_to_new_flags, &store_buffer_exit); in TF_BUILTIN()

1623 if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER; in GetWriteBarrierMode()
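
This hit shows the consumer side: code that stores many fields asks once for a write-barrier mode, and an active incremental marker forces the full UPDATE_WRITE_BARRIER answer so no marking work is skipped. A hedged sketch of how such a mode might be computed and consumed; only the rule "marking forces the full barrier" comes from the hit, the rest is assumed:

    // Sketch only: compute a write-barrier mode up front, then branch on it
    // at every store. The enum values, heap fields, and helpers are
    // illustrative, not V8's definitions.
    enum WriteBarrierModeSketch { SKIP_WRITE_BARRIER, UPDATE_WRITE_BARRIER };

    struct HeapSketch {
      bool incremental_marking_is_marking = false;
      bool object_is_in_new_space = false;
    };

    WriteBarrierModeSketch GetWriteBarrierModeSketch(const HeapSketch& heap) {
      // While incremental marking runs, every store takes the full barrier.
      if (heap.incremental_marking_is_marking) return UPDATE_WRITE_BARRIER;
      if (heap.object_is_in_new_space) return SKIP_WRITE_BARRIER;
      return UPDATE_WRITE_BARRIER;
    }

    // Stub for the slow path that would record the slot for the collector.
    void WriteBarrierSlow(void** slot, void* value) {
      (void)slot;
      (void)value;
    }

    inline void StoreField(void** slot, void* value, WriteBarrierModeSketch mode) {
      *slot = value;
      if (mode == UPDATE_WRITE_BARRIER) WriteBarrierSlow(slot, value);
    }
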
10552 if (isolate->heap()->incremental_marking()->IsMarking()) { in FinalizeTracing()