Searched refs:new_space (Results 1 – 16 of 16) sorted by relevance

/external/v8/src/heap/
  scavenger.cc
    102  if (heap->new_space()->Contains(obj)) {  in RecordCopiedObject()
    103  heap->new_space()->RecordAllocation(obj);  in RecordCopiedObject()
    105  heap->new_space()->RecordPromotion(obj);  in RecordCopiedObject()
    119  target->address() + size == heap->new_space()->top() ||  in INLINE()
    120  target->address() + size + kPointerSize == heap->new_space()->top());  in INLINE()
    126  heap->new_space()->top()));  in INLINE()
    154  heap->new_space()->AllocateRaw(object_size, alignment);  in SemiSpaceCopyObject()
    162  heap->promotion_queue()->SetNewLimit(heap->new_space()->top());  in SemiSpaceCopyObject()
  scavenge-job.cc
    28  size_t new_space_size = heap->new_space()->Size();  in RunInternal()
    29  size_t new_space_capacity = heap->new_space()->Capacity();  in RunInternal()
  array-buffer-tracker.cc
    81  for (Page* page : NewSpacePageRange(heap->new_space()->FromSpaceStart(),  in FreeDeadInNewSpace()
    82  heap->new_space()->FromSpaceEnd())) {  in FreeDeadInNewSpace()
  mark-compact.cc
    157  VerifyMarking(heap->new_space());  in VerifyMarking()
    230  VerifyEvacuation(heap->new_space());  in VerifyEvacuation()
    347  VerifyMarkbitsAreClean(heap_->new_space());  in VerifyMarkbitsAreClean()
    398  ClearMarkbitsInNewSpace(heap_->new_space());  in ClearMarkbits()
    522  for (Page* p : *heap_->new_space()) {  in EnsureNewSpaceCompleted()
    1777  heap_->new_space()->AllocateRawSynchronized(size_in_bytes, alignment);  in AllocateInNewSpace()
    1779  if (!heap_->new_space()->AddFreshPageSynchronized()) {  in AllocateInNewSpace()
    1782  allocation = heap_->new_space()->AllocateRawSynchronized(size_in_bytes,  in AllocateInNewSpace()
    1849  page->heap()->new_space()->MovePageFromSpaceToSpace(page);  in Move()
    1923  NewSpace* space = heap()->new_space();  in DiscoverGreyObjectsInNewSpace()
    [all …]
  incremental-marking.cc
    364  DeactivateIncrementalWriteBarrierForSpace(heap_->new_space());  in DeactivateIncrementalWriteBarrier()
    390  ActivateIncrementalWriteBarrier(heap_->new_space());  in ActivateIncrementalWriteBarrier()
    495  if (space == heap_->new_space()) {  in Start()
    961  if (space == heap_->new_space()) {  in Stop()
    1073  size_t oom_slack = heap()->new_space()->Capacity() + 64 * MB;  in StepSizeToMakeProgress()
  gc-tracer.cc
    178  heap_->new_space()->top() - heap_->new_space()->bottom();  in Start()
  heap.cc
    665  (new_space()->CommittedMemory() * 100.0) / CommittedMemory()));  in GarbageCollectionEpilogue()
    707  UPDATE_COUNTERS_FOR_SPACE(new_space)  in GarbageCollectionEpilogue()
    722  new_space_top_after_last_gc_ = new_space()->top();  in GarbageCollectionEpilogue()
    1186  allocation = new_space()->AllocateRawUnaligned(size);  in ReserveSpace()
    1312  int start_new_space_size = static_cast<int>(Heap::new_space()->Size());  in PerformGarbageCollection()
    1536  reinterpret_cast<struct Entry*>(heap_->new_space()->ToSpaceEnd());  in Initialize()
    4038  return new_space_top_after_last_gc_ == new_space()->top();  in IsHeapIterable()
    5368  new_space()->UpdateInlineAllocationLimit(0);  in EnableInlineAllocation()
    5377  new_space()->UpdateInlineAllocationLimit(0);  in DisableInlineAllocation()
    5436  new_space_top_after_last_gc_ = new_space()->top();  in SetUp()
    [all …]
  heap-inl.h
    59  SemiSpace::AssertValidRange(target->GetIsolate()->heap()->new_space()->top(),  in insert()
    188  return new_space_allocation_counter_ + new_space()->AllocatedSinceLastGC();  in NewSpaceAllocationCounter()
  heap.h
    984  NewSpace* new_space() { return new_space_; }  in new_space() function
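Note: every V8 hit above reaches the young generation through the Heap::new_space() accessor defined at heap.h:984. The sketch below is a minimal, self-contained illustration of that accessor-plus-Contains call pattern (as in scavenger.cc:102 and api.cc:6665); the Object, NewSpace, and Heap types here are hypothetical stand-ins, not the real v8::internal classes.

    // Hypothetical stand-ins for v8::internal::Heap / NewSpace, sketching the
    // accessor pattern seen in the hits above.
    #include <cstdint>

    struct Object {};

    class NewSpace {
     public:
      // Mirrors NewSpace::Contains(obj): is the object inside this space's address range?
      bool Contains(const Object* obj) const {
        uintptr_t addr = reinterpret_cast<uintptr_t>(obj);
        return addr >= start_ && addr < end_;
      }
     private:
      uintptr_t start_ = 0;
      uintptr_t end_ = 0;
    };

    class Heap {
     public:
      // Same shape as the accessor at heap.h:984.
      NewSpace* new_space() { return &new_space_; }
     private:
      NewSpace new_space_;
    };

    // Typical call site, as in api.cc:6665 (CanMakeExternal): the object
    // qualifies only if it is not in the young generation.
    bool CanMakeExternal(Heap* heap, const Object* obj) {
      return !heap->new_space()->Contains(obj);
    }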

/external/v8/src/extensions/
  statistics-extension.cc
    121  {heap->new_space()->Size(), "new_space_live_bytes"},  in GetCounters()
    122  {heap->new_space()->Available(), "new_space_available_bytes"},  in GetCounters()
    123  {heap->new_space()->CommittedMemory(), "new_space_commited_bytes"},  in GetCounters()

/external/v8/src/profiler/
  sampling-heap-profiler.cc
    69  heap->new_space()->AddAllocationObserver(new_space_observer_.get());  in SamplingHeapProfiler()
    72  if (space != heap->new_space()) {  in SamplingHeapProfiler()
    80  heap_->new_space()->RemoveAllocationObserver(new_space_observer_.get());  in ~SamplingHeapProfiler()
    83  if (space != heap_->new_space()) {  in ~SamplingHeapProfiler()

/external/v8/src/
  string-stream.cc
    575  char* new_space = NewArray<char>(new_bytes);  in grow() local
    576  if (new_space == NULL) {  in grow()
    579  MemCopy(new_space, space_, *bytes);  in grow()
    582  space_ = new_space;  in grow()
    583  return new_space;  in grow()
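Note: in string-stream.cc (as in code-stub-assembler.cc, jmemmgr.c, and blkparse.c further down) new_space is an ordinary local variable, not the GC space. The hits at 575-583 are StringStream's grow-and-copy step. The following is a rough, self-contained sketch of that pattern; plain new[]/std::memcpy stand in for V8's NewArray/MemCopy helpers, and GrowableBuffer is a hypothetical class, not the real StringStream allocator.

    #include <cstddef>
    #include <cstring>
    #include <new>

    // Hypothetical buffer that grows the way StringStream's grow() does:
    // allocate a larger block, copy the old contents, then swap it in.
    class GrowableBuffer {
     public:
      char* grow(size_t* bytes, size_t new_bytes) {
        char* new_space = new (std::nothrow) char[new_bytes];
        if (new_space == nullptr) {
          return space_;          // allocation failed: keep the old buffer
        }
        if (space_ != nullptr) {
          std::memcpy(new_space, space_, *bytes);
          delete[] space_;        // the real code returns the old block to its allocator
        }
        *bytes = new_bytes;       // *bytes tracks the current capacity
        space_ = new_space;
        return new_space;
      }
     private:
      char* space_ = nullptr;
    };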
  code-stub-assembler.cc
    825  bool const new_space = !(flags & kPretenured);  in Allocate() local
    827  new_space  in Allocate()
    831  new_space  in Allocate()
    1591  bool const new_space = !(flags & kPretenured);  in AllocateConsString() local
    1592  if (new_space) {  in AllocateConsString()
  api.cc
    6665  return !isolate->heap()->new_space()->Contains(*obj);  in CanMakeExternal()

/external/libjpeg-turbo/
  jmemmgr.c
    655  size_t new_space = (long) sptr->rows_in_array *  in realize_virt_arrays() local
    660  if (SIZE_MAX - maximum_space < new_space)  in realize_virt_arrays()
    662  maximum_space += new_space;  in realize_virt_arrays()
    667  size_t new_space = (long) bptr->rows_in_array *  in realize_virt_arrays() local
    672  if (SIZE_MAX - maximum_space < new_space)  in realize_virt_arrays()
    674  maximum_space += new_space;  in realize_virt_arrays()
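Note: the jmemmgr.c hits show libjpeg-turbo's overflow-guarded size accumulation in realize_virt_arrays(): each per-array new_space value is added to maximum_space only after verifying that the sum cannot wrap past SIZE_MAX. A minimal sketch of that guard follows, with a hypothetical function name and an exception in place of the library's error handler.

    #include <cstddef>
    #include <cstdint>
    #include <stdexcept>

    // Add new_space to maximum_space only if the result cannot overflow size_t,
    // mirroring the `SIZE_MAX - maximum_space < new_space` check above.
    size_t add_checked(size_t maximum_space, size_t new_space) {
      if (SIZE_MAX - maximum_space < new_space) {
        throw std::overflow_error("total workspace size would exceed SIZE_MAX");
      }
      return maximum_space + new_space;
    }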

/external/blktrace/
  blkparse.c
    326  int new_space, size;  in resize_cpu_info() local
    339  new_space = (new_count - ncpus) * sizeof(struct per_cpu_info);  in resize_cpu_info()
    340  memset(new_start, 0, new_space);  in resize_cpu_info()