Lines Matching refs:heap_ (each entry: source line number, the matching code, and the enclosing function)
56 rb_table_(heap_->GetReadBarrierTable()), in ConcurrentCopying()
137 for (const auto& space : heap_->GetContinuousSpaces()) { in BindBitmaps()
271 ThreadFlipVisitor thread_flip_visitor(this, heap_->use_tlab_); in FlipThreadRoots()
273 heap_->ThreadFlipBegin(self); // Sync with JNI critical calls. in FlipThreadRoots()
276 heap_->ThreadFlipEnd(self); in FlipThreadRoots()
291 heap_->SwapStacks(); in SwapStacks()
296 live_stack_freeze_size_ = heap_->GetLiveStack()->Size(); in RecordLiveStackFreezeSize()
314 DCHECK(collector_->heap_->GetMarkBitmap()->Test(obj)) in operator ()()
366 for (space::ContinuousSpace* space : heap_->GetContinuousSpaces()) { in MarkingPhase()
544 heap_->rb_table_->ClearAll(); in DisableMarking()
545 DCHECK(heap_->rb_table_->IsAllCleared()); in DisableMarking()
651 return heap_->allocation_stack_.get(); in GetAllocationStack()
655 return heap_->live_stack_.get(); in GetLiveStack()
796 heap_->GetMarkBitmap()->Visit(visitor); in VerifyNoFromSpaceReferences()
801 for (auto* it = heap_->allocation_stack_->Begin(), *end = heap_->allocation_stack_->End(); in VerifyNoFromSpaceReferences()
1201 accounting::ObjectStack* live_stack = heap_->GetLiveStack(); in Sweep()
1205 heap_->MarkAllocStackAsLive(live_stack); in Sweep()
1226 RecordFreeLOS(heap_->GetLargeObjectsSpace()->Sweep(swap_bitmaps)); in SweepLargeObjects()
1236 DCHECK(collector_->heap_->GetMarkBitmap()->Test(obj)) << obj; in operator ()()
1251 for (auto& space : heap_->GetContinuousSpaces()) { in ClearBlackPtrs()
1263 space::LargeObjectSpace* large_object_space = heap_->GetLargeObjectsSpace(); in ClearBlackPtrs()
1332 …LOG(INFO) << "(before) num_bytes_allocated=" << heap_->num_bytes_allocated_.LoadSequentiallyConsis… in ReclaimPhase()
1336 …LOG(INFO) << "(after) num_bytes_allocated=" << heap_->num_bytes_allocated_.LoadSequentiallyConsist… in ReclaimPhase()
1357 heap_->UnBindBitmaps(); in ReclaimPhase()
1412 CHECK(heap_->collector_type_ == kCollectorTypeCC) << static_cast<size_t>(heap_->collector_type_); in AssertToSpaceInvariant()
1459 CHECK(heap_->collector_type_ == kCollectorTypeCC) << static_cast<size_t>(heap_->collector_type_); in AssertToSpaceInvariant()
1809 if (heap_->use_tlab_) { in Copy()
1823 to_ref = heap_->non_moving_space_->Alloc(Thread::Current(), obj_size, in Copy()
1857 heap_->num_bytes_allocated_.FetchAndAddSequentiallyConsistent(bytes_allocated); in Copy()
1865 DCHECK(heap_->non_moving_space_->HasAddress(to_ref)); in Copy()
1872 heap_->non_moving_space_->Free(Thread::Current(), to_ref); in Copy()
1880 CHECK(region_space_->IsInToSpace(to_ref) || heap_->non_moving_space_->HasAddress(to_ref)); in Copy()
1901 DCHECK(heap_->non_moving_space_->HasAddress(to_ref)); in Copy()
1929 heap_->non_moving_space_->HasAddress(to_ref)) in IsMarked()
2084 heap_->ClearMarkedObjects(); in FinishPhase()
2106 heap_->GetReferenceProcessor()->DelayReferenceReferent(klass, reference, this); in DelayReferenceReferent()
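Taken together, the matches show one pattern: the collector keeps a back-pointer (heap_) to the owning heap and routes all heap-global state through it, such as the allocation/live stacks (SwapStacks, GetAllocationStack), the bitmaps (GetMarkBitmap, UnBindBitmaps), the byte-allocation counter (num_bytes_allocated_), and the auxiliary spaces (non_moving_space_, GetLargeObjectsSpace). The sketch below is a minimal, hypothetical illustration of that ownership pattern only, not ART's real API: the Heap and ConcurrentCopying classes here are heavily simplified stand-ins with invented signatures, kept just close enough to the listing to show how the heap_ member is used.

    #include <atomic>
    #include <cstddef>
    #include <iostream>
    #include <vector>

    // Hypothetical, heavily simplified stand-in for art::gc::Heap.
    // Names loosely mirror the matches above; the real class is much richer.
    class Heap {
     public:
      // Analogue of heap_->SwapStacks(): exchange allocation and live stacks.
      void SwapStacks() { allocation_stack_.swap(live_stack_); }

      // Analogue of heap_->num_bytes_allocated_.FetchAndAddSequentiallyConsistent(...).
      void AddBytesAllocated(size_t bytes) {
        num_bytes_allocated_.fetch_add(bytes, std::memory_order_seq_cst);
      }

      size_t NumBytesAllocated() const {
        return num_bytes_allocated_.load(std::memory_order_seq_cst);
      }

      std::vector<void*>& GetAllocationStack() { return allocation_stack_; }
      std::vector<void*>& GetLiveStack() { return live_stack_; }

     private:
      std::vector<void*> allocation_stack_;
      std::vector<void*> live_stack_;
      std::atomic<size_t> num_bytes_allocated_{0};
    };

    // The collector holds heap_ and goes through it for all heap-global
    // state, which is the access pattern the refs:heap_ matches document.
    class ConcurrentCopying {
     public:
      explicit ConcurrentCopying(Heap* heap) : heap_(heap) {}

      void MarkingPhase() {
        heap_->SwapStacks();                                       // cf. SwapStacks() match
        live_stack_freeze_size_ = heap_->GetLiveStack().size();    // cf. RecordLiveStackFreezeSize()
      }

      void ReclaimPhase() {
        std::cout << "(before) num_bytes_allocated="
                  << heap_->NumBytesAllocated() << "\n";           // cf. ReclaimPhase() matches
      }

     private:
      Heap* const heap_;
      size_t live_stack_freeze_size_ = 0;
    };

    int main() {
      Heap heap;
      heap.AddBytesAllocated(128);
      ConcurrentCopying collector(&heap);
      collector.MarkingPhase();
      collector.ReclaimPhase();
    }

Keeping the counter, stacks, and spaces behind the single heap_ pointer (rather than duplicating them per collector) is why nearly every phase of the collector in the listing touches heap_ at least once.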