/external/v8/src/heap/
D | heap-write-barrier-inl.h
     24  struct MemoryChunk {
     30  V8_INLINE static heap_internals::MemoryChunk* FromHeapObject(   [in FromHeapObject()]
     32  return reinterpret_cast<MemoryChunk*>(reinterpret_cast<Address>(object) &   [in FromHeapObject()]
     52  heap_internals::MemoryChunk* value_chunk =   [in GenerationalBarrierInternal()]
     53  heap_internals::MemoryChunk::FromHeapObject(value);   [in GenerationalBarrierInternal()]
     54  heap_internals::MemoryChunk* object_chunk =   [in GenerationalBarrierInternal()]
     55  heap_internals::MemoryChunk::FromHeapObject(object);   [in GenerationalBarrierInternal()]
     65  heap_internals::MemoryChunk* value_chunk =   [in MarkingBarrierInternal()]
     66  heap_internals::MemoryChunk::FromHeapObject(value);   [in MarkingBarrierInternal()]
    106  heap_internals::MemoryChunk* array_chunk =   [in GenerationalBarrierForElements()]
    [all …]

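Lines 30-32 above show the core MemoryChunk lookup idiom: chunks are allocated at a fixed alignment, so the chunk header for any heap object is recovered by masking the low bits of its pointer. A minimal sketch of that idiom, with an assumed 512 KiB alignment standing in for the real MemoryChunk::kAlignment from spaces.h:

    #include <cstdint>

    // Sketch only; the 512 KiB alignment is an assumed value, the real
    // constant is MemoryChunk::kAlignment / kAlignmentMask in spaces.h.
    using Address = uintptr_t;
    constexpr Address kAlignment = Address{1} << 19;
    constexpr Address kAlignmentMask = kAlignment - 1;

    struct MemoryChunk {
      // Clearing the low bits of any pointer into a chunk yields the chunk
      // header, because chunks always start on a kAlignment boundary.
      static MemoryChunk* FromAddress(Address a) {
        return reinterpret_cast<MemoryChunk*>(a & ~kAlignmentMask);
      }
    };
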
D | mark-compact.h
     33  return MarkBitFrom(MemoryChunk::FromAddress(obj->address()),   [in MarkBitFrom()]
     37  V8_INLINE MarkBit MarkBitFrom(MemoryChunk* p, Address addr) {   [in MarkBitFrom()]
     75  MemoryChunk* p = MemoryChunk::FromAddress(obj->address());   [in GreyToBlack()]
     82  void ClearLiveness(MemoryChunk* chunk) {   [in ClearLiveness()]
     90  MarkBitCellIterator(MemoryChunk* chunk, Bitmap* bitmap) : chunk_(chunk) {   [in MarkBitCellIterator()]
    145  MemoryChunk* chunk_;
    168  inline iterator(MemoryChunk* chunk, Bitmap* bitmap, Address start);
    186  MemoryChunk* const chunk_;
    197  LiveObjectRange(MemoryChunk* chunk, Bitmap* bitmap)   [in LiveObjectRange()]
    207  MemoryChunk* const chunk_;
    [all …]

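MarkBitFrom at line 37 resolves an address to a bit in its chunk's marking bitmap (which concurrent-marking.cc below locates at chunk->address() + MemoryChunk::kHeaderSize). A sketch of the index arithmetic, assuming one mark bit per pointer-sized word and 32-bit bitmap cells, which matches V8's Bitmap layout of this vintage; the helper signatures here are illustrative, not the real API:

    #include <cstdint>

    constexpr int kPointerSizeLog2 = 3;    // assume a 64-bit build
    constexpr uint32_t kBitsPerCell = 32;

    struct MarkBit {
      uint32_t* cell;
      uint32_t mask;
      bool Get() const { return (*cell & mask) != 0; }
      void Set() const { *cell |= mask; }
    };

    // bitmap points at the chunk's marking bitmap, chunk_start at the chunk
    // base: word index in the chunk -> 32-bit cell plus bit within the cell.
    inline MarkBit MarkBitFrom(uint32_t* bitmap, uintptr_t chunk_start,
                               uintptr_t addr) {
      uintptr_t index = (addr - chunk_start) >> kPointerSizeLog2;
      return {&bitmap[index / kBitsPerCell],
              uint32_t{1} << (index % kBitsPerCell)};
    }
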
D | spaces.cc
    157  Address aligned_base = ::RoundUp(base, MemoryChunk::kAlignment);   [in CodeRange()]
    238  DCHECK(IsAddressAligned(current.start, MemoryChunk::kAlignment));   [in AllocateRawMemory()]
    249  DCHECK(IsAddressAligned(address, MemoryChunk::kAlignment));   [in FreeRawMemory()]
    265  size_t aligned_requested = ::RoundUp(requested_size, MemoryChunk::kAlignment);   [in ReserveBlock()]
    271  DCHECK(IsAddressAligned(block->start, MemoryChunk::kAlignment));   [in ReserveBlock()]
    428  MemoryChunk* chunk = nullptr;   [in PerformFreeMemoryOnQueuedNonRegularChunks()]
    436  MemoryChunk* chunk = nullptr;   [in PerformFreeMemoryOnQueuedChunks()]
    445  bool pooled = chunk->IsFlagSet(MemoryChunk::POOLED);   [in PerformFreeMemoryOnQueuedChunks()]
    574  Heap* MemoryChunk::synchronized_heap() {   [in synchronized_heap()]
    579  void MemoryChunk::InitializationMemoryFence() {   [in InitializationMemoryFence()]
    [all …]

D | remembered-set.h
     26  static void Insert(MemoryChunk* chunk, Address slot_addr) {   [in Insert()]
     39  static bool Contains(MemoryChunk* chunk, Address slot_addr) {   [in Contains()]
     53  static void Remove(MemoryChunk* chunk, Address slot_addr) {   [in Remove()]
     64  static void RemoveRange(MemoryChunk* chunk, Address start, Address end,   [in RemoveRange()]
    108  IterateMemoryChunks(heap, [mode, callback](MemoryChunk* chunk) {   [in Iterate()]
    120  MemoryChunk* chunk;   [in IterateMemoryChunks()]
    138  static void Iterate(MemoryChunk* chunk, Callback callback,   [in Iterate()]
    155  static int NumberOfPreFreedEmptyBuckets(MemoryChunk* chunk) {   [in NumberOfPreFreedEmptyBuckets()]
    168  static void PreFreeEmptyBuckets(MemoryChunk* chunk) {   [in PreFreeEmptyBuckets()]
    179  static void FreeEmptyBuckets(MemoryChunk* chunk) {   [in FreeEmptyBuckets()]
    [all …]

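The RememberedSet interface above is keyed by chunk: a slot address is stored relative to the MemoryChunk that contains it. A toy version of Insert/Contains/Remove under that assumption; V8's actual backing store is a bucketed SlotSet bitmap hanging off the chunk header, not a std::set:

    #include <cstdint>
    #include <set>

    using Address = uintptr_t;

    // Toy per-chunk remembered set: slots are stored as offsets from the
    // chunk base, so entries stay valid independent of neighbouring chunks.
    struct MemoryChunk {
      Address start;
      std::set<uint32_t> slots;  // stand-in for V8's SlotSet
    };

    void Insert(MemoryChunk* chunk, Address slot_addr) {
      chunk->slots.insert(static_cast<uint32_t>(slot_addr - chunk->start));
    }

    bool Contains(MemoryChunk* chunk, Address slot_addr) {
      return chunk->slots.count(
                 static_cast<uint32_t>(slot_addr - chunk->start)) > 0;
    }

    void Remove(MemoryChunk* chunk, Address slot_addr) {
      chunk->slots.erase(static_cast<uint32_t>(slot_addr - chunk->start));
    }
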
D | spaces.h
     46  class MemoryChunk;
    252  class MemoryChunk {
    256  size_t operator()(MemoryChunk* const chunk) const {   [in operator()]
    418  static MemoryChunk* FromAddress(Address a) {   [in FromAddress()]
    419  return reinterpret_cast<MemoryChunk*>(OffsetFrom(a) & ~kAlignmentMask);   [in FromAddress()]
    422  static MemoryChunk* FromHeapObject(const HeapObject* o) {   [in FromHeapObject()]
    423  return reinterpret_cast<MemoryChunk*>(reinterpret_cast<Address>(o) &   [in FromHeapObject()]
    430  static inline MemoryChunk* FromAnyPointerAddress(Heap* heap, Address addr);
    437  MemoryChunk* chunk = MemoryChunk::FromAddress(mark - 1);   [in UpdateHighWaterMark()]
    448  return reinterpret_cast<Address>(const_cast<MemoryChunk*>(this));   [in address()]
    [all …]

D | heap-inl.h
    266  MemoryChunk* memory_chunk = MemoryChunk::FromAddress(object_address);   [in UpdateAllocationsHash()]
    346  bool result = MemoryChunk::FromHeapObject(heap_object)->InNewSpace();   [in InNewSpace()]
    374  return MemoryChunk::FromHeapObject(heap_object)   [in InFromSpace()]
    393  return MemoryChunk::FromHeapObject(heap_object)->IsFlagSet(Page::IN_TO_SPACE);   [in InToSpace()]
    412  MemoryChunk* chunk = MemoryChunk::FromHeapObject(obj);   [in FromWritableHeapObject()]
    426  return page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) &&   [in ShouldBePromoted()]
    600  if (page->IsFlagSet(MemoryChunk::IS_EXECUTABLE)) {   [in CodeSpaceMemoryModificationScope()]
    615  if (page->IsFlagSet(MemoryChunk::IS_EXECUTABLE)) {   [in ~CodeSpaceMemoryModificationScope()]
    643  MemoryChunk* chunk)   [in CodePageMemoryModificationScope()]
    646  chunk_->IsFlagSet(MemoryChunk::IS_EXECUTABLE)) {   [in CodePageMemoryModificationScope()]
    [all …]

D | spaces-inl.h
    100  ? MemoryChunk::FromAddress(o->address())->InToSpace()   [in Contains()]
    101  : MemoryChunk::FromAddress(o->address())->InFromSpace();   [in Contains()]
    110  if (p == MemoryChunk::FromAddress(a)) return true;   [in ContainsSlow()]
    119  return MemoryChunk::FromAddress(o->address())->InNewSpace();   [in Contains()]
    143  return MemoryChunk::FromAnyPointerAddress(heap(), addr)->owner() == this;   [in Contains()]
    184  MemoryChunk* MemoryChunk::FromAnyPointerAddress(Heap* heap, Address addr) {   [in FromAnyPointerAddress()]
    185  MemoryChunk* chunk = heap->lo_space()->FindPage(addr);   [in FromAnyPointerAddress()]
    187  chunk = MemoryChunk::FromAddress(addr);   [in FromAnyPointerAddress()]
    225  MemoryChunk* MemoryChunkIterator::next() {   [in next()]

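Lines 184-187 show why FromAnyPointerAddress exists: a large object can span many alignment units, so the masking trick is only safe after the large-object space has been ruled out. Reassembled from the match lines above, with stub types standing in for Heap and LargeObjectSpace and an assumed alignment constant:

    #include <cstdint>

    using Address = uintptr_t;
    constexpr Address kAlignmentMask = (Address{1} << 19) - 1;  // assumed

    struct MemoryChunk {
      static MemoryChunk* FromAddress(Address a) {
        return reinterpret_cast<MemoryChunk*>(a & ~kAlignmentMask);
      }
    };

    // Stub: FindPage returns the chunk owning addr if addr lies in the
    // large-object space, nullptr otherwise.
    struct LargeObjectSpace {
      MemoryChunk* FindPage(Address addr);
    };

    struct Heap {
      LargeObjectSpace* lo_space();
    };

    MemoryChunk* FromAnyPointerAddress(Heap* heap, Address addr) {
      // Check the large-object space first; only if the address is not in a
      // large page is the alignment-masking lookup valid.
      MemoryChunk* chunk = heap->lo_space()->FindPage(addr);
      if (chunk == nullptr) chunk = MemoryChunk::FromAddress(addr);
      return chunk;
    }
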
D | concurrent-marking.h
     28  std::unordered_map<MemoryChunk*, intptr_t, MemoryChunk::Hasher>;
     80  void ClearLiveness(MemoryChunk* chunk);

D | heap.cc
    930  MemoryChunk* chunk = MemoryChunk::FromAddress(code->address());   [in InvalidateCodeEmbeddedObjects()]
    936  MemoryChunk* chunk = MemoryChunk::FromAddress(code->address());   [in InvalidateCodeDeoptimizationData()]
   2016  explicit PageScavengingItem(MemoryChunk* chunk) : chunk_(chunk) {}   [in PageScavengingItem()]
   2022  MemoryChunk* const chunk_;
   2136  this, [&job](MemoryChunk* chunk) {   [in Scavenge()]
   2217  RememberedSet<OLD_TO_NEW>::IterateMemoryChunks(this, [](MemoryChunk* chunk) {   [in Scavenge()]
   2251  void Heap::UnprotectAndRegisterMemoryChunk(MemoryChunk* chunk) {   [in UnprotectAndRegisterMemoryChunk()]
   2261  UnprotectAndRegisterMemoryChunk(MemoryChunk::FromAddress(object->address()));   [in UnprotectAndRegisterMemoryChunk()]
   2264  void Heap::UnregisterUnprotectedMemoryChunk(MemoryChunk* chunk) {   [in UnregisterUnprotectedMemoryChunk()]
   2337  std::map<MemoryChunk*, size_t> size_map;   [in VerifyNewSpace()]
    [all …]

D | concurrent-marking.cc
     36  Bitmap* bitmap(const MemoryChunk* chunk) {   [in bitmap()]
     37  return Bitmap::FromAddress(chunk->address() + MemoryChunk::kHeaderSize);   [in bitmap()]
     40  void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) {   [in IncrementLiveBytes()]
    110  MemoryChunk* chunk = MemoryChunk::FromAddress(heap_object->address());   [in ProcessWeakHeapObject()]
    396  MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());   [in MarkObject()]
    764  void ConcurrentMarking::ClearLiveness(MemoryChunk* chunk) {   [in ClearLiveness()]

D | scavenger.h
     33  void ScavengePage(MemoryChunk* page);
     55  void AddPageToSweeperIfNecessary(MemoryChunk* page);

D | mark-compact-inl.h
    278  MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());   [in VisitFixedArrayIncremental()]
    280  if (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR)) {   [in VisitFixedArrayIncremental()]
    406  LiveObjectRange<mode>::iterator::iterator(MemoryChunk* chunk, Bitmap* bitmap,   [in iterator()]
    487  DCHECK_EQ(chunk_, MemoryChunk::FromAddress(end));   [in AdvanceToNextValidObject()]

D | mark-compact.cc
     63  virtual Bitmap* bitmap(const MemoryChunk* chunk) = 0;
    172  Bitmap* bitmap(const MemoryChunk* chunk) override {   [in bitmap()]
    511  MemoryChunk::FromAddress(obj->address())));   [in VerifyMarkbitsAreClean()]
    641  if (p->IsFlagSet(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING)) {   [in CollectEvacuationCandidates()]
    644  p->ClearFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);   [in CollectEvacuationCandidates()]
   2334  static inline EvacuationMode ComputeEvacuationMode(MemoryChunk* chunk) {   [in ComputeEvacuationMode()]
   2336  if (chunk->IsFlagSet(MemoryChunk::PAGE_NEW_OLD_PROMOTION))   [in ComputeEvacuationMode()]
   2338  if (chunk->IsFlagSet(MemoryChunk::PAGE_NEW_NEW_PROMOTION))   [in ComputeEvacuationMode()]
   2434  page->IsFlagSet(MemoryChunk::IS_EXECUTABLE),   [in EvacuatePage()]
   2622  if (page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK)) {   [in EvacuatePagesInParallel()]
    [all …]

D | invalidated-slots.h
     34  explicit InvalidatedSlotsFilter(MemoryChunk* chunk);

D | incremental-marking.cc
    151  DCHECK(MemoryChunk::FromAddress(from->address())->SweepingDone());   [in NotifyLeftTrimming()]
    152  DCHECK_EQ(MemoryChunk::FromAddress(from->address()),   [in NotifyLeftTrimming()]
    153  MemoryChunk::FromAddress(to->address()));   [in NotifyLeftTrimming()]
    730  MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());   [in IsFixedArrayWithProgressBar()]
    731  return chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR);   [in IsFixedArrayWithProgressBar()]

D | scavenger.cc
    109  void Scavenger::AddPageToSweeperIfNecessary(MemoryChunk* page) {   [in AddPageToSweeperIfNecessary()]
    118  void Scavenger::ScavengePage(MemoryChunk* page) {   [in ScavengePage()]

D | invalidated-slots.cc
     11  InvalidatedSlotsFilter::InvalidatedSlotsFilter(MemoryChunk* chunk) {   [in InvalidatedSlotsFilter()]

D | store-buffer.cc
    162  MemoryChunk* chunk = MemoryChunk::FromAnyPointerAddress(heap_, addr);   [in MoveEntriesToRememberedSet()]

D | scavenger-inl.h
     42  MemoryChunk* chunk = MemoryChunk::FromAddress(heap_object->address());   [in PageMemoryFence()]

D | heap.h
    601  void UnprotectAndRegisterMemoryChunk(MemoryChunk* chunk);
    603  void UnregisterUnprotectedMemoryChunk(MemoryChunk* chunk);
   2242  std::unordered_set<MemoryChunk*> unprotected_memory_chunks_;
   2376  explicit inline CodePageMemoryModificationScope(MemoryChunk* chunk);
   2380  MemoryChunk* chunk_;

/external/vulkan-validation-layers/layers/
D | gpu_validation.h
     52  struct MemoryChunk {
     63  std::list<MemoryChunk> chunk_list_;
     66  VkResult AllocMemoryChunk(MemoryChunk &chunk);
     67  void FreeMemoryChunk(MemoryChunk &chunk);

D | gpu_validation.cpp
     62  MemoryChunk *pChunk = nullptr;   [in GetBlock()]
     72  MemoryChunk new_chunk;   [in GetBlock()]
    104  [&block](const MemoryChunk &c) { return c.buffer == block.buffer; });   [in PutBackBlock()]
    142  VkResult GpuDeviceMemoryManager::AllocMemoryChunk(MemoryChunk &chunk) {   [in AllocMemoryChunk()]
    200  void GpuDeviceMemoryManager::FreeMemoryChunk(MemoryChunk &chunk) {   [in FreeMemoryChunk()]

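This MemoryChunk is unrelated to V8's: GpuDeviceMemoryManager suballocates fixed-size blocks for GPU-assisted validation out of larger chunks and recycles them through chunk_list_. A structural sketch of that pattern with Vulkan handles reduced to plain integers; in the real layer AllocMemoryChunk creates a VkBuffer and binds device memory, which is omitted here:

    #include <cstdint>
    #include <list>
    #include <vector>

    struct MemoryBlock { uint64_t buffer; uint32_t offset; };

    struct MemoryChunk {
      uint64_t buffer = 0;                    // VkBuffer in the real layer
      std::vector<uint32_t> available_offsets;
    };

    class GpuDeviceMemoryManager {
     public:
      // Hand out a free block from any chunk that still has one.
      bool GetBlock(MemoryBlock* block) {
        for (auto& chunk : chunk_list_) {
          if (!chunk.available_offsets.empty()) {
            block->buffer = chunk.buffer;
            block->offset = chunk.available_offsets.back();
            chunk.available_offsets.pop_back();
            return true;
          }
        }
        return false;  // the real code would AllocMemoryChunk() a new chunk
      }

      // Return a block to the chunk it was carved from (matched by buffer,
      // as the lambda at line 104 above does).
      void PutBackBlock(const MemoryBlock& block) {
        for (auto& chunk : chunk_list_) {
          if (chunk.buffer == block.buffer) {
            chunk.available_offsets.push_back(block.offset);
            return;
          }
        }
      }

     private:
      std::list<MemoryChunk> chunk_list_;
    };
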
/external/v8/src/
D | isolate-inl.h
     15  i::MemoryChunk* chunk = i::MemoryChunk::FromHeapObject(obj);   [in FromWritableHeapObject()]

/external/v8/src/builtins/
D | builtins-internal-gen.cc
    227  IntPtrConstant(MemoryChunk::kFlagsOffset));   [in IsPageFlagSet()]
    256  IntPtrConstant(MemoryChunk::kHeaderSize));   [in GetMarkBit()]
    385  IsPageFlagSet(value, MemoryChunk::kIsInNewSpaceMask);   [in TF_BUILTIN()]
    389  IsPageFlagSet(object, MemoryChunk::kIsInNewSpaceMask);   [in TF_BUILTIN()]
    412  GotoIfNot(IsPageFlagSet(value, MemoryChunk::kEvacuationCandidateMask),   [in TF_BUILTIN()]
    415  IsPageFlagSet(object, MemoryChunk::kSkipEvacuationSlotsRecordingMask),   [in TF_BUILTIN()]

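Both this CodeStubAssembler code and the ia32 macro assembler below implement the same page-flag test: mask the pointer down to its chunk, load the flags word at MemoryChunk::kFlagsOffset, and AND it against a mask. In plain C++ the operation looks roughly like this; the alignment and header offset are assumed values, and the read is only meaningful for pointers into a real managed heap:

    #include <cstdint>

    constexpr uintptr_t kAlignmentMask = (uintptr_t{1} << 19) - 1;  // assumed
    constexpr uintptr_t kFlagsOffset = 2 * sizeof(uintptr_t);       // assumed

    inline bool IsPageFlagSet(uintptr_t object, uintptr_t mask) {
      // Mask to the chunk base, then test bits in the header's flags word.
      uintptr_t chunk = object & ~kAlignmentMask;
      uintptr_t flags = *reinterpret_cast<const uintptr_t*>(chunk + kFlagsOffset);
      return (flags & mask) != 0;
    }
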
/external/v8/src/ia32/
D | macro-assembler-ia32.cc
    384  MemoryChunk::kPointersToHereAreInterestingMask, zero, &done,   [in RecordWrite()]
    388  MemoryChunk::kPointersFromHereAreInterestingMask,   [in RecordWrite()]
   1841  test_b(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));   [in CheckPageFlag()]
   1843  test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));   [in CheckPageFlag()]