/external/v8/test/cctest/heap/ |
D | test-spaces.cc |
    363  NewSpace new_space(heap);  in TEST() local
    365  CHECK(new_space.SetUp(CcTest::heap()->ReservedSemiSpaceSize(),  in TEST()
    367  CHECK(new_space.HasBeenSetUp());  in TEST()
    369  while (new_space.Available() >= Page::kMaxRegularHeapObjectSize) {  in TEST()
    371  new_space.AllocateRawUnaligned(Page::kMaxRegularHeapObjectSize)  in TEST()
    373  CHECK(new_space.Contains(HeapObject::cast(obj)));  in TEST()
    376  new_space.TearDown();  in TEST()
    762  NewSpace* new_space = i_isolate->heap()->new_space();  in UNINITIALIZED_TEST() local
    766  if (new_space->InitialTotalCapacity() == Page::kPageSize) {  in UNINITIALIZED_TEST()
    767  CHECK_EQ(new_space->CommittedMemory(), new_space->InitialTotalCapacity());  in UNINITIALIZED_TEST()
    [all …]
|
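The test-spaces.cc fragments above sketch a complete lifecycle: set up a NewSpace, bump-allocate kMaxRegularHeapObjectSize chunks until Available() runs dry, verify containment, and tear down. A minimal self-contained model of that loop, using a stand-in BumpSpace class rather than V8's real NewSpace (every name below is illustrative, not V8 API):

    #include <cassert>
    #include <cstddef>
    #include <vector>

    // Stand-in for NewSpace: a fixed-capacity bump allocator.
    class BumpSpace {
     public:
      explicit BumpSpace(size_t capacity) : buffer_(capacity), top_(0) {}
      size_t Available() const { return buffer_.size() - top_; }
      // Returns nullptr on exhaustion, mirroring a failed AllocationResult.
      char* AllocateRaw(size_t bytes) {
        if (bytes > Available()) return nullptr;
        char* result = buffer_.data() + top_;
        top_ += bytes;
        return result;
      }
      bool Contains(const char* p) const {
        return p >= buffer_.data() && p < buffer_.data() + buffer_.size();
      }
     private:
      std::vector<char> buffer_;
      size_t top_;
    };

    int main() {
      const size_t kChunk = 4096;  // plays the role of kMaxRegularHeapObjectSize
      BumpSpace space(16 * kChunk);
      // Fill the space chunk by chunk, as the loop at line 369 does.
      while (space.Available() >= kChunk) {
        char* obj = space.AllocateRaw(kChunk);
        assert(obj != nullptr);
        assert(space.Contains(obj));  // mirrors the CHECK at line 373
      }
      return 0;
    }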
D | utils-inl.h |
    36  heap->new_space()->DisableInlineAllocationSteps();
    38  static_cast<int>(*heap->new_space()->allocation_limit_address() -
    39  *heap->new_space()->allocation_top_address());
|
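utils-inl.h computes how much room is left in the linear allocation area as the distance between the allocation limit and the allocation top (lines 38-39 above). The same arithmetic with plain pointers, as a sketch (function name is ours):

    // Free bytes in a linear allocation area: limit minus top.
    static int BytesLeftInLinearArea(const char* allocation_top,
                                     const char* allocation_limit) {
      return static_cast<int>(allocation_limit - allocation_top);
    }

    int main() {
      char area[1024];
      // Pretend the first 100 bytes are already bump-allocated.
      return BytesLeftInLinearArea(area + 100, area + sizeof(area)) == 924 ? 0 : 1;
    }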
D | test-heap.cc |
    2023  heap->new_space()->AllocateRawAligned(size, alignment);  in NewSpaceAllocateAligned()
    2033  Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();  in AlignNewSpace()
    2045  Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();  in TEST()
    2271  static void FillUpNewSpace(NewSpace* new_space) {  in FillUpNewSpace() argument
    2274  Heap* heap = new_space->heap();  in FillUpNewSpace()
    2279  intptr_t available = new_space->Capacity() - new_space->Size();  in FillUpNewSpace()
    2290  NewSpace* new_space = heap->new_space();  in TEST() local
    2302  old_capacity = new_space->TotalCapacity();  in TEST()
    2303  new_space->Grow();  in TEST()
    2304  new_capacity = new_space->TotalCapacity();  in TEST()
    [all …]
|
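Two patterns recur in the test-heap.cc hits: FillUpNewSpace derives the remaining room from Capacity() - Size() (line 2279), and the growth test records TotalCapacity() before and after Grow() (lines 2302-2304). A reduced model of the latter, assuming a simple capacity-doubling policy purely for illustration (the real policy lives inside NewSpace):

    #include <cassert>
    #include <cstddef>

    class ToySpace {
     public:
      ToySpace(size_t initial, size_t max) : capacity_(initial), max_capacity_(max) {}
      size_t TotalCapacity() const { return capacity_; }
      void Grow() {
        if (capacity_ * 2 <= max_capacity_) capacity_ *= 2;  // assumed policy
      }
     private:
      size_t capacity_;
      size_t max_capacity_;
    };

    int main() {
      ToySpace space(1u << 20, 8u << 20);
      size_t old_capacity = space.TotalCapacity();
      space.Grow();
      size_t new_capacity = space.TotalCapacity();
      assert(new_capacity == 2 * old_capacity);  // shape of the CHECK in TEST()
      return 0;
    }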
/external/v8/test/cctest/ |
D | test-mementos.cc |
    36  NewSpace* new_space = heap->new_space();  in SetUpNewSpaceWithPoisonedMementoAtTop() local
    49  reinterpret_cast<AllocationMemento*>(new_space->top() + kHeapObjectTag);  in SetUpNewSpaceWithPoisonedMementoAtTop()
    78  Address top = CcTest::i_isolate()->heap()->new_space()->top();  in TEST()
    79  *(CcTest::i_isolate()->heap()->new_space()->allocation_limit_address()) = top;  in TEST()
|
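The write at line 79 stores the current top into the allocation-limit slot, leaving zero bytes in the linear area so the next inline allocation must fail and take the slow path. The trick in isolation, with stand-in names:

    struct LinearArea {
      char* top;
      char* limit;
    };

    // After this call limit - top == 0, so any nonzero bump-allocation
    // request fails and execution falls through to the slow path.
    static void ForceAllocationSlowPath(LinearArea* area) {
      area->limit = area->top;
    }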
D | test-dictionary.cc |
    178  SimulateFullSpace(CcTest::heap()->new_space());  in TestHashSetCausesGC()
    209  SimulateFullSpace(CcTest::heap()->new_space());  in TestHashMapCausesGC()
|
D | test-unboxed-doubles.cc |
    1057  CHECK(isolate->heap()->new_space()->Contains(*obj));  in TEST()
    1064  CHECK(isolate->heap()->new_space()->Contains(*temp));  in TEST()
    1139  CHECK(isolate->heap()->new_space()->Contains(*obj));  in TEST()
    1393  CHECK(isolate->heap()->new_space()->Contains(*obj));  in TEST()
    1403  CHECK(isolate->heap()->new_space()->Contains(*temp));  in TEST()
|
D | test-api.cc |
    18468  SimulateFullSpace(CcTest::heap()->new_space());  in PrologueCallbackAlloc()
    18488  SimulateFullSpace(CcTest::heap()->new_space());  in EpilogueCallbackAlloc()
|
/external/v8/src/heap/ |
D | scavenger.cc |
    99   if (heap->new_space()->Contains(obj)) {  in RecordCopiedObject()
    100  heap->new_space()->RecordAllocation(obj);  in RecordCopiedObject()
    102  heap->new_space()->RecordPromotion(obj);  in RecordCopiedObject()
    116  target->address() + size == heap->new_space()->top() ||  in INLINE()
    117  target->address() + size + kPointerSize == heap->new_space()->top());  in INLINE()
    123  heap->new_space()->top()));  in INLINE()
    151  heap->new_space()->AllocateRaw(object_size, alignment);  in SemiSpaceCopyObject()
    159  heap->promotion_queue()->SetNewLimit(heap->new_space()->top());  in SemiSpaceCopyObject()
|
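SemiSpaceCopyObject (line 151) reserves room in to-space and then moves the object's payload there; line 159 pushes the new top as a limit for the promotion queue. A minimal sketch of the reserve-then-copy step, with stub types in place of V8's:

    #include <cstddef>
    #include <cstring>
    #include <vector>

    struct SemiSpace {
      std::vector<char> mem;
      size_t top = 0;
      explicit SemiSpace(size_t size) : mem(size) {}
      char* AllocateRaw(size_t bytes) {
        if (top + bytes > mem.size()) return nullptr;  // allocation failure
        char* result = mem.data() + top;
        top += bytes;
        return result;
      }
    };

    // Returns the new location, or nullptr if to-space is full; a real
    // scavenger would install a forwarding pointer at the old address.
    char* CopyObject(SemiSpace* to_space, const char* object, size_t size) {
      char* target = to_space->AllocateRaw(size);
      if (target != nullptr) std::memcpy(target, object, size);
      return target;
    }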
D | scavenge-job.cc |
    28  size_t new_space_size = heap->new_space()->Size();  in RunInternal()
    29  size_t new_space_capacity = heap->new_space()->Capacity();  in RunInternal()
|
D | store-buffer-inl.h |
    38  !heap_->new_space()->Contains(addr));  in EnterDirectlyIntoStoreBuffer()
|
D | mark-compact.cc |
    155   VerifyMarking(heap->new_space());  in VerifyMarking()
    232   VerifyEvacuation(heap->new_space());  in VerifyEvacuation()
    415   VerifyMarkbitsAreClean(heap_->new_space());  in VerifyMarkbitsAreClean()
    470   ClearMarkbitsInNewSpace(heap_->new_space());  in ClearMarkbits()
    1666  heap_->new_space()->AllocateRawSynchronized(size_in_bytes, alignment);  in AllocateInNewSpace()
    1668  if (!heap_->new_space()->AddFreshPageSynchronized()) {  in AllocateInNewSpace()
    1671  allocation = heap_->new_space()->AllocateRawSynchronized(size_in_bytes,  in AllocateInNewSpace()
    1761  NewSpace* space = heap()->new_space();  in DiscoverGreyObjectsInNewSpace()
    3068  NewSpace* new_space = heap()->new_space();  in EvacuateNewSpacePrologue() local
    3071  Address from_bottom = new_space->bottom();  in EvacuateNewSpacePrologue()
    [all …]
|
D | incremental-marking.cc |
    397  DeactivateIncrementalWriteBarrierForSpace(heap_->new_space());  in DeactivateIncrementalWriteBarrier()
    429  ActivateIncrementalWriteBarrier(heap_->new_space());  in ActivateIncrementalWriteBarrier()
    544  heap_->new_space()->AddInlineAllocationObserver(&observer_);  in Start()
    942  heap_->new_space()->RemoveInlineAllocationObserver(&observer_);  in Stop()
    971  heap_->new_space()->RemoveInlineAllocationObserver(&observer_);  in Finalize()
|
D | heap.cc |
    696   (new_space()->CommittedMemory() * 100.0) / CommittedMemory()));  in GarbageCollectionEpilogue()
    738   UPDATE_COUNTERS_FOR_SPACE(new_space)  in GarbageCollectionEpilogue()
    753   new_space_top_after_last_gc_ = new_space()->top();  in GarbageCollectionEpilogue()
    1161  allocation = new_space()->AllocateRawUnaligned(size);  in ReserveSpace()
    1291  int start_new_space_size = Heap::new_space()->SizeAsInt();  in PerformGarbageCollection()
    1423  PauseInlineAllocationObserversScope pause_observers(new_space());  in MarkCompact()
    1567  reinterpret_cast<intptr_t*>(heap_->new_space()->ToSpaceEnd());  in Initialize()
    1630  PauseInlineAllocationObserversScope pause_observers(new_space());  in Scavenge()
    4004  return new_space_top_after_last_gc_ == new_space()->top();  in IsHeapIterable()
    5042  new_space()->UpdateInlineAllocationLimit(0);  in EnableInlineAllocation()
    [all …]
|
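Line 696 reports new-space committed memory as a percentage of the heap's total committed memory. The arithmetic, pulled out into a helper of our own naming:

    // (new-space committed * 100.0) / total committed, guarded against
    // division by zero.
    static double NewSpaceCommittedPercent(double new_space_committed,
                                           double total_committed) {
      if (total_committed == 0.0) return 0.0;
      return new_space_committed * 100.0 / total_committed;
    }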
D | gc-tracer.cc |
    164  heap_->new_space()->top() - heap_->new_space()->bottom();  in Start()
|
D | heap.h |
    1080  NewSpace* new_space() { return &new_space_; }  in new_space() function
    1104  return new_space();  in space()
    1456  return new_space_allocation_counter_ + new_space()->AllocatedSinceLastGC();  in NewSpaceAllocationCounter()
|
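heap.h shows two small idioms: line 1080 hands out a pointer to a by-value member, and line 1456 reports total new-space allocation as a saved counter plus whatever has been allocated since the last GC. A reduced model (the Toy* names are stand-ins):

    #include <cstddef>

    class ToyNewSpace {
     public:
      size_t AllocatedSinceLastGC() const { return allocated_since_gc_; }
      void Allocate(size_t bytes) { allocated_since_gc_ += bytes; }
     private:
      size_t allocated_since_gc_ = 0;
    };

    class ToyHeap {
     public:
      ToyNewSpace* new_space() { return &new_space_; }  // pointer to owned member
      size_t NewSpaceAllocationCounter() {
        return new_space_allocation_counter_ + new_space()->AllocatedSinceLastGC();
      }
     private:
      ToyNewSpace new_space_;
      size_t new_space_allocation_counter_ = 0;  // snapshot taken at each GC
    };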
D | heap-inl.h |
    44  SemiSpace::AssertValidRange(target->GetIsolate()->heap()->new_space()->top(),  in insert()
|
D | spaces.h |
    2962  explicit PauseInlineAllocationObserversScope(NewSpace* new_space)  in PauseInlineAllocationObserversScope() argument
    2963  : new_space_(new_space) {  in PauseInlineAllocationObserversScope()
|
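PauseInlineAllocationObserversScope is an RAII guard: the constructor (lines 2962-2963) takes the NewSpace and pauses observer notifications, and the destructor resumes them, which is how heap.cc wraps MarkCompact() and Scavenge() above. The shape of that guard, with assumed Pause/Resume hooks since the listing shows only the constructor:

    class ObservedSpace {
     public:
      void PauseObservers() { paused_ = true; }    // assumed hook
      void ResumeObservers() { paused_ = false; }  // assumed hook
     private:
      bool paused_ = false;
    };

    class PauseObserversScope {
     public:
      explicit PauseObserversScope(ObservedSpace* new_space) : new_space_(new_space) {
        new_space_->PauseObservers();
      }
      ~PauseObserversScope() { new_space_->ResumeObservers(); }
     private:
      ObservedSpace* new_space_;
    };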
/external/v8/src/extensions/ |
D | statistics-extension.cc |
    120  {heap->new_space()->Size(), "new_space_live_bytes"},  in GetCounters()
    121  {heap->new_space()->Available(), "new_space_available_bytes"},  in GetCounters()
    122  {heap->new_space()->CommittedMemory(), "new_space_commited_bytes"},  in GetCounters()
|
/external/v8/src/ |
D | string-stream.cc |
    567  char* new_space = NewArray<char>(new_bytes);  in grow() local
    568  if (new_space == NULL) {  in grow()
    571  MemCopy(new_space, space_, *bytes);  in grow()
    574  space_ = new_space;  in grow()
    575  return new_space;  in grow()
|
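The string-stream.cc hits are the classic grow-a-buffer sequence: allocate a larger array, keep the old buffer if allocation fails (line 568), copy the live bytes (line 571), then swap the pointer and return it (lines 574-575). A self-contained rendering, with plain new[] standing in for V8's NewArray<char> and doubling assumed for new_bytes:

    #include <cstring>
    #include <new>

    // *space / *bytes play the roles of the space_ member and the size
    // in-out parameter from the original grow().
    static char* Grow(char** space, unsigned* bytes) {
      unsigned new_bytes = *bytes * 2;
      char* new_space = new (std::nothrow) char[new_bytes];
      if (new_space == nullptr) {
        return *space;  // out of memory: keep writing into the old buffer
      }
      std::memcpy(new_space, *space, *bytes);
      *bytes = new_bytes;
      delete[] *space;
      *space = new_space;
      return new_space;
    }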
D | api.cc |
    5923  if (!isolate->heap()->new_space()->Contains(*obj)) return true;  in CanMakeExternal()
|
/external/blktrace/ |
D | blkparse.c |
    326  int new_space, size;  in resize_cpu_info() local
    339  new_space = (new_count - ncpus) * sizeof(struct per_cpu_info);  in resize_cpu_info()
    340  memset(new_start, 0, new_space);  in resize_cpu_info()
|
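Here new_space is an ordinary byte count, not a heap space: resize_cpu_info() grows the per-CPU array and zeroes only the freshly added tail, whose size is (new_count - ncpus) * sizeof(struct per_cpu_info) (lines 339-340). A reduced model with the struct stubbed out and error handling trimmed:

    #include <cstdlib>
    #include <cstring>

    struct per_cpu_info {
      int cpu;
      long events;
    };

    static per_cpu_info* resize_cpu_info(per_cpu_info* cpus, int ncpus,
                                         int new_count) {
      per_cpu_info* grown = static_cast<per_cpu_info*>(
          std::realloc(cpus, new_count * sizeof(per_cpu_info)));
      if (grown == nullptr) return nullptr;  // caller keeps the old array

      per_cpu_info* new_start = grown + ncpus;  // first uninitialized entry
      int new_space = (new_count - ncpus) * static_cast<int>(sizeof(per_cpu_info));
      std::memset(new_start, 0, new_space);
      return grown;
    }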
/external/v8/test/cctest/compiler/ |
D | test-simplified-lowering.cc |
    657  CHECK(t.heap()->new_space()->Contains(result) || flag[i] == TENURED);  in TEST()
|