
Searched refs:MemoryChunk (Results 1 – 25 of 49) sorted by relevance


/external/v8/src/heap/
heap-write-barrier-inl.h
24 struct MemoryChunk {
30 V8_INLINE static heap_internals::MemoryChunk* FromHeapObject( in FromHeapObject()
32 return reinterpret_cast<MemoryChunk*>(reinterpret_cast<Address>(object) & in FromHeapObject()
52 heap_internals::MemoryChunk* value_chunk = in GenerationalBarrierInternal()
53 heap_internals::MemoryChunk::FromHeapObject(value); in GenerationalBarrierInternal()
54 heap_internals::MemoryChunk* object_chunk = in GenerationalBarrierInternal()
55 heap_internals::MemoryChunk::FromHeapObject(object); in GenerationalBarrierInternal()
65 heap_internals::MemoryChunk* value_chunk = in MarkingBarrierInternal()
66 heap_internals::MemoryChunk::FromHeapObject(value); in MarkingBarrierInternal()
106 heap_internals::MemoryChunk* array_chunk = in GenerationalBarrierForElements()
[all …]
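
For orientation, a minimal sketch of the pattern these write-barrier matches show: every heap object lives inside an aligned chunk, so masking the low bits of its address yields the chunk header, whose flags drive the barrier. The alignment value, flag layout, and helper names below are assumptions, not the real V8 definitions.

    #include <cstdint>

    using Address = uintptr_t;

    struct MemoryChunk {
      static constexpr Address kAlignment = 512 * 1024;  // assumed chunk size
      uintptr_t flags;                                   // assumed layout

      static MemoryChunk* FromAddress(Address a) {
        // Chunks are alignment-sized and aligned, so clearing the low bits
        // of any interior address lands on the chunk header.
        return reinterpret_cast<MemoryChunk*>(a & ~(kAlignment - 1));
      }
      bool InNewSpace() const { return (flags & 1) != 0; }  // hypothetical bit
    };

    // Generational barrier: record the slot only when a new-space value is
    // written into an old-space object (an old-to-new pointer).
    void GenerationalBarrier(Address object, Address slot, Address value) {
      MemoryChunk* value_chunk = MemoryChunk::FromAddress(value);
      MemoryChunk* object_chunk = MemoryChunk::FromAddress(object);
      if (value_chunk->InNewSpace() && !object_chunk->InNewSpace()) {
        // The real code inserts `slot` into an old-to-new remembered set here.
        (void)slot;
      }
    }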
mark-compact.h
33 return MarkBitFrom(MemoryChunk::FromAddress(obj->address()), in MarkBitFrom()
37 V8_INLINE MarkBit MarkBitFrom(MemoryChunk* p, Address addr) { in MarkBitFrom()
75 MemoryChunk* p = MemoryChunk::FromAddress(obj->address()); in GreyToBlack()
82 void ClearLiveness(MemoryChunk* chunk) { in ClearLiveness()
90 MarkBitCellIterator(MemoryChunk* chunk, Bitmap* bitmap) : chunk_(chunk) { in MarkBitCellIterator()
145 MemoryChunk* chunk_;
168 inline iterator(MemoryChunk* chunk, Bitmap* bitmap, Address start);
186 MemoryChunk* const chunk_;
197 LiveObjectRange(MemoryChunk* chunk, Bitmap* bitmap) in LiveObjectRange()
207 MemoryChunk* const chunk_;
[all …]
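
The mark-compact matches revolve around per-chunk mark bitmaps. A hedged sketch of the index arithmetic behind MarkBitFrom, assuming one mark bit per pointer-sized word, 32-bit bitmap cells, and a 64-bit build; the constants are illustrative, not V8's:

    #include <cstddef>
    #include <cstdint>

    using Address = uintptr_t;
    constexpr int kPointerSizeLog2 = 3;  // 64-bit build assumed

    struct MarkBit {
      uint32_t* cell;  // bitmap cell holding the bit
      uint32_t mask;   // single-bit mask within that cell
    };

    // One mark bit per word: the bit index of an address is its word offset
    // from the start of the chunk's payload.
    MarkBit MarkBitFrom(uint32_t* bitmap, Address chunk_start, Address addr) {
      size_t index = (addr - chunk_start) >> kPointerSizeLog2;
      return {&bitmap[index >> 5], 1u << (index & 31)};
    }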
spaces.cc
157 Address aligned_base = ::RoundUp(base, MemoryChunk::kAlignment); in CodeRange()
238 DCHECK(IsAddressAligned(current.start, MemoryChunk::kAlignment)); in AllocateRawMemory()
249 DCHECK(IsAddressAligned(address, MemoryChunk::kAlignment)); in FreeRawMemory()
265 size_t aligned_requested = ::RoundUp(requested_size, MemoryChunk::kAlignment); in ReserveBlock()
271 DCHECK(IsAddressAligned(block->start, MemoryChunk::kAlignment)); in ReserveBlock()
428 MemoryChunk* chunk = nullptr; in PerformFreeMemoryOnQueuedNonRegularChunks()
436 MemoryChunk* chunk = nullptr; in PerformFreeMemoryOnQueuedChunks()
445 bool pooled = chunk->IsFlagSet(MemoryChunk::POOLED); in PerformFreeMemoryOnQueuedChunks()
574 Heap* MemoryChunk::synchronized_heap() { in synchronized_heap()
579 void MemoryChunk::InitializationMemoryFence() { in InitializationMemoryFence()
[all …]
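
The spaces.cc matches lean on power-of-two alignment math (RoundUp, IsAddressAligned). A self-contained sketch of those helpers; the assert documents the power-of-two assumption:

    #include <cassert>
    #include <cstdint>

    // Round x up to the next multiple of a power-of-two alignment.
    inline uintptr_t RoundUp(uintptr_t x, uintptr_t alignment) {
      assert((alignment & (alignment - 1)) == 0);  // power of two required
      return (x + alignment - 1) & ~(alignment - 1);
    }

    inline bool IsAddressAligned(uintptr_t addr, uintptr_t alignment) {
      return (addr & (alignment - 1)) == 0;
    }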
remembered-set.h
26 static void Insert(MemoryChunk* chunk, Address slot_addr) { in Insert()
39 static bool Contains(MemoryChunk* chunk, Address slot_addr) { in Contains()
53 static void Remove(MemoryChunk* chunk, Address slot_addr) { in Remove()
64 static void RemoveRange(MemoryChunk* chunk, Address start, Address end, in RemoveRange()
108 IterateMemoryChunks(heap, [mode, callback](MemoryChunk* chunk) { in Iterate()
120 MemoryChunk* chunk; in IterateMemoryChunks()
138 static void Iterate(MemoryChunk* chunk, Callback callback, in Iterate()
155 static int NumberOfPreFreedEmptyBuckets(MemoryChunk* chunk) { in NumberOfPreFreedEmptyBuckets()
168 static void PreFreeEmptyBuckets(MemoryChunk* chunk) { in PreFreeEmptyBuckets()
179 static void FreeEmptyBuckets(MemoryChunk* chunk) { in FreeEmptyBuckets()
[all …]
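
remembered-set.h keys all slot bookkeeping off the owning chunk. A toy version of the Insert/Contains/Remove surface, substituting a std::set of chunk-relative offsets for the real bucketed slot set:

    #include <cstdint>
    #include <set>

    using Address = uintptr_t;

    struct MemoryChunk {
      Address start = 0;
      std::set<uintptr_t> slots;  // stand-in for the real bucketed SlotSet
    };

    // Slots are stored as offsets from the chunk base, so lookups stay
    // cheap and chunk-relative iteration is straightforward.
    void Insert(MemoryChunk* chunk, Address slot_addr) {
      chunk->slots.insert(slot_addr - chunk->start);
    }
    bool Contains(MemoryChunk* chunk, Address slot_addr) {
      return chunk->slots.count(slot_addr - chunk->start) != 0;
    }
    void Remove(MemoryChunk* chunk, Address slot_addr) {
      chunk->slots.erase(slot_addr - chunk->start);
    }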
spaces.h
46 class MemoryChunk;
252 class MemoryChunk {
256 size_t operator()(MemoryChunk* const chunk) const { in operator()
418 static MemoryChunk* FromAddress(Address a) { in FromAddress()
419 return reinterpret_cast<MemoryChunk*>(OffsetFrom(a) & ~kAlignmentMask); in FromAddress()
422 static MemoryChunk* FromHeapObject(const HeapObject* o) { in FromHeapObject()
423 return reinterpret_cast<MemoryChunk*>(reinterpret_cast<Address>(o) & in FromHeapObject()
430 static inline MemoryChunk* FromAnyPointerAddress(Heap* heap, Address addr);
437 MemoryChunk* chunk = MemoryChunk::FromAddress(mark - 1); in UpdateHighWaterMark()
448 return reinterpret_cast<Address>(const_cast<MemoryChunk*>(this)); in address()
[all …]
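
One subtlety in the spaces.h matches: UpdateHighWaterMark masks mark - 1 rather than mark, so an allocation top sitting exactly on a chunk boundary still resolves to the chunk below it. A sketch of the idea; the field name and alignment are assumed:

    #include <algorithm>
    #include <cstdint>

    using Address = uintptr_t;

    struct MemoryChunk {
      static constexpr Address kAlignment = 512 * 1024;  // assumed
      Address high_water_mark = 0;                       // assumed field

      static MemoryChunk* FromAddress(Address a) {
        return reinterpret_cast<MemoryChunk*>(a & ~(kAlignment - 1));
      }
    };

    void UpdateHighWaterMark(Address mark) {
      if (mark == 0) return;
      // mark - 1: a mark exactly at the chunk's end belongs to that chunk,
      // not to the (possibly unrelated) chunk starting at the boundary.
      MemoryChunk* chunk = MemoryChunk::FromAddress(mark - 1);
      chunk->high_water_mark = std::max(chunk->high_water_mark, mark);
    }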
heap-inl.h
266 MemoryChunk* memory_chunk = MemoryChunk::FromAddress(object_address); in UpdateAllocationsHash()
346 bool result = MemoryChunk::FromHeapObject(heap_object)->InNewSpace(); in InNewSpace()
374 return MemoryChunk::FromHeapObject(heap_object) in InFromSpace()
393 return MemoryChunk::FromHeapObject(heap_object)->IsFlagSet(Page::IN_TO_SPACE); in InToSpace()
412 MemoryChunk* chunk = MemoryChunk::FromHeapObject(obj); in FromWritableHeapObject()
426 return page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) && in ShouldBePromoted()
600 if (page->IsFlagSet(MemoryChunk::IS_EXECUTABLE)) { in CodeSpaceMemoryModificationScope()
615 if (page->IsFlagSet(MemoryChunk::IS_EXECUTABLE)) { in ~CodeSpaceMemoryModificationScope()
643 MemoryChunk* chunk) in CodePageMemoryModificationScope()
646 chunk_->IsFlagSet(MemoryChunk::IS_EXECUTABLE)) { in CodePageMemoryModificationScope()
[all …]
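
The heap-inl.h scopes bracket writes to executable pages: unprotect on entry if the chunk carries IS_EXECUTABLE, restore on exit. A hedged RAII sketch; the flag encoding and the SetReadWrite/SetReadExecutable calls are hypothetical stand-ins for the real page-protection machinery:

    #include <cstdint>

    struct MemoryChunk {
      enum Flag : uint64_t { IS_EXECUTABLE = 1u << 0 };  // assumed encoding
      uint64_t flags = 0;
      bool IsFlagSet(Flag f) const { return (flags & f) != 0; }
      void SetReadWrite() {}        // hypothetical: mprotect(..., RW)
      void SetReadExecutable() {}   // hypothetical: mprotect(..., R|X)
    };

    // Make an executable chunk writable for the lifetime of the scope,
    // then flip it back; non-executable chunks are left untouched.
    class CodePageMemoryModificationScope {
     public:
      explicit CodePageMemoryModificationScope(MemoryChunk* chunk)
          : chunk_(chunk),
            active_(chunk->IsFlagSet(MemoryChunk::IS_EXECUTABLE)) {
        if (active_) chunk_->SetReadWrite();
      }
      ~CodePageMemoryModificationScope() {
        if (active_) chunk_->SetReadExecutable();
      }

     private:
      MemoryChunk* chunk_;
      bool active_;
    };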
spaces-inl.h
100 ? MemoryChunk::FromAddress(o->address())->InToSpace() in Contains()
101 : MemoryChunk::FromAddress(o->address())->InFromSpace(); in Contains()
110 if (p == MemoryChunk::FromAddress(a)) return true; in ContainsSlow()
119 return MemoryChunk::FromAddress(o->address())->InNewSpace(); in Contains()
143 return MemoryChunk::FromAnyPointerAddress(heap(), addr)->owner() == this; in Contains()
184 MemoryChunk* MemoryChunk::FromAnyPointerAddress(Heap* heap, Address addr) { in FromAnyPointerAddress()
185 MemoryChunk* chunk = heap->lo_space()->FindPage(addr); in FromAnyPointerAddress()
187 chunk = MemoryChunk::FromAddress(addr); in FromAnyPointerAddress()
225 MemoryChunk* MemoryChunkIterator::next() { in next()
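
FromAnyPointerAddress in the spaces-inl.h matches exists because large objects span many alignment units: masking an interior address would not land on a header. The large-object space is consulted first, with masking as the fallback. A sketch under assumed types (the linear FindPage is for illustration only):

    #include <cstdint>
    #include <vector>

    using Address = uintptr_t;

    struct MemoryChunk {
      static constexpr Address kAlignment = 512 * 1024;  // assumed
      Address start = 0;
      Address size = 0;

      static MemoryChunk* FromAddress(Address a) {
        return reinterpret_cast<MemoryChunk*>(a & ~(kAlignment - 1));
      }
    };

    struct LargeObjectSpace {
      std::vector<MemoryChunk*> pages;
      MemoryChunk* FindPage(Address a) {
        for (MemoryChunk* p : pages)
          if (a >= p->start && a < p->start + p->size) return p;
        return nullptr;
      }
    };

    MemoryChunk* FromAnyPointerAddress(LargeObjectSpace* lo_space, Address addr) {
      MemoryChunk* chunk = lo_space->FindPage(addr);
      return chunk != nullptr ? chunk : MemoryChunk::FromAddress(addr);
    }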
concurrent-marking.h
28 std::unordered_map<MemoryChunk*, intptr_t, MemoryChunk::Hasher>;
80 void ClearLiveness(MemoryChunk* chunk);
heap.cc
930 MemoryChunk* chunk = MemoryChunk::FromAddress(code->address()); in InvalidateCodeEmbeddedObjects()
936 MemoryChunk* chunk = MemoryChunk::FromAddress(code->address()); in InvalidateCodeDeoptimizationData()
2016 explicit PageScavengingItem(MemoryChunk* chunk) : chunk_(chunk) {} in PageScavengingItem()
2022 MemoryChunk* const chunk_;
2136 this, [&job](MemoryChunk* chunk) { in Scavenge()
2217 RememberedSet<OLD_TO_NEW>::IterateMemoryChunks(this, [](MemoryChunk* chunk) { in Scavenge()
2251 void Heap::UnprotectAndRegisterMemoryChunk(MemoryChunk* chunk) { in UnprotectAndRegisterMemoryChunk()
2261 UnprotectAndRegisterMemoryChunk(MemoryChunk::FromAddress(object->address())); in UnprotectAndRegisterMemoryChunk()
2264 void Heap::UnregisterUnprotectedMemoryChunk(MemoryChunk* chunk) { in UnregisterUnprotectedMemoryChunk()
2337 std::map<MemoryChunk*, size_t> size_map; in VerifyNewSpace()
[all …]
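
heap.cc's UnprotectAndRegisterMemoryChunk pairs with the unordered_set seen in the heap.h matches: unprotect a code chunk once, remember it, and re-protect everything in one sweep. A minimal sketch; the protection calls are hypothetical:

    #include <unordered_set>

    struct MemoryChunk {
      void SetReadWrite() {}       // hypothetical: make page writable
      void SetReadExecutable() {}  // hypothetical: restore R-X
    };

    class Heap {
     public:
      void UnprotectAndRegisterMemoryChunk(MemoryChunk* chunk) {
        // insert().second is true only on first registration, so a chunk
        // is unprotected at most once per cycle.
        if (unprotected_memory_chunks_.insert(chunk).second) {
          chunk->SetReadWrite();
        }
      }
      void UnregisterUnprotectedMemoryChunk(MemoryChunk* chunk) {
        unprotected_memory_chunks_.erase(chunk);
      }
      void ProtectUnprotectedMemoryChunks() {
        for (MemoryChunk* chunk : unprotected_memory_chunks_)
          chunk->SetReadExecutable();
        unprotected_memory_chunks_.clear();
      }

     private:
      std::unordered_set<MemoryChunk*> unprotected_memory_chunks_;
    };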
concurrent-marking.cc
36 Bitmap* bitmap(const MemoryChunk* chunk) { in bitmap()
37 return Bitmap::FromAddress(chunk->address() + MemoryChunk::kHeaderSize); in bitmap()
40 void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) { in IncrementLiveBytes()
110 MemoryChunk* chunk = MemoryChunk::FromAddress(heap_object->address()); in ProcessWeakHeapObject()
396 MemoryChunk* chunk = MemoryChunk::FromAddress(object->address()); in MarkObject()
764 void ConcurrentMarking::ClearLiveness(MemoryChunk* chunk) { in ClearLiveness()
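
The concurrent-marking matches accumulate live bytes per chunk in a worker-local map (the LiveBytesMap typedef above) instead of updating chunks atomically from marking threads. A sketch of that pattern; the 19-bit shift in the hasher assumes 512 KB chunk alignment and is illustrative:

    #include <cstdint>
    #include <unordered_map>

    struct MemoryChunk {
      intptr_t live_bytes = 0;

      struct Hasher {
        size_t operator()(MemoryChunk* const chunk) const {
          // Chunk pointers are highly aligned; shift out the constant
          // low bits so the hash has some entropy.
          return reinterpret_cast<uintptr_t>(chunk) >> 19;
        }
      };
    };

    using LiveBytesMap =
        std::unordered_map<MemoryChunk*, intptr_t, MemoryChunk::Hasher>;

    // Workers count locally; the totals are flushed to the chunks later,
    // on a single thread.
    void IncrementLiveBytes(LiveBytesMap& local, MemoryChunk* chunk,
                            intptr_t by) {
      local[chunk] += by;
    }
    void FlushLiveBytes(LiveBytesMap& local) {
      for (auto& entry : local) entry.first->live_bytes += entry.second;
      local.clear();
    }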
scavenger.h
33 void ScavengePage(MemoryChunk* page);
55 void AddPageToSweeperIfNecessary(MemoryChunk* page);
mark-compact-inl.h
278 MemoryChunk* chunk = MemoryChunk::FromAddress(object->address()); in VisitFixedArrayIncremental()
280 if (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR)) { in VisitFixedArrayIncremental()
406 LiveObjectRange<mode>::iterator::iterator(MemoryChunk* chunk, Bitmap* bitmap, in iterator()
487 DCHECK_EQ(chunk_, MemoryChunk::FromAddress(end)); in AdvanceToNextValidObject()
mark-compact.cc
63 virtual Bitmap* bitmap(const MemoryChunk* chunk) = 0;
172 Bitmap* bitmap(const MemoryChunk* chunk) override { in bitmap()
511 MemoryChunk::FromAddress(obj->address()))); in VerifyMarkbitsAreClean()
641 if (p->IsFlagSet(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING)) { in CollectEvacuationCandidates()
644 p->ClearFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING); in CollectEvacuationCandidates()
2334 static inline EvacuationMode ComputeEvacuationMode(MemoryChunk* chunk) { in ComputeEvacuationMode()
2336 if (chunk->IsFlagSet(MemoryChunk::PAGE_NEW_OLD_PROMOTION)) in ComputeEvacuationMode()
2338 if (chunk->IsFlagSet(MemoryChunk::PAGE_NEW_NEW_PROMOTION)) in ComputeEvacuationMode()
2434 page->IsFlagSet(MemoryChunk::IS_EXECUTABLE), in EvacuatePage()
2622 if (page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK)) { in EvacuatePagesInParallel()
[all …]
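
ComputeEvacuationMode in the mark-compact.cc matches dispatches on chunk flags: pages promoted wholesale get their own modes, everything else is copied object by object. A sketch with assumed flag bits (the real flags live on MemoryChunk):

    enum class EvacuationMode {
      kObjectsNewToOld,  // copy new-space objects into old space
      kPageNewToOld,     // promote the whole page into old space
      kPageNewToNew,     // keep the whole page in new space
      kObjectsOldToOld   // evacuate an old-space candidate page
    };

    // Assumed flag encoding.
    enum ChunkFlags : unsigned {
      PAGE_NEW_OLD_PROMOTION = 1u << 0,
      PAGE_NEW_NEW_PROMOTION = 1u << 1,
      IN_NEW_SPACE = 1u << 2,
    };

    EvacuationMode ComputeEvacuationMode(unsigned flags) {
      if (flags & PAGE_NEW_OLD_PROMOTION) return EvacuationMode::kPageNewToOld;
      if (flags & PAGE_NEW_NEW_PROMOTION) return EvacuationMode::kPageNewToNew;
      if (flags & IN_NEW_SPACE) return EvacuationMode::kObjectsNewToOld;
      return EvacuationMode::kObjectsOldToOld;
    }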
invalidated-slots.h
34 explicit InvalidatedSlotsFilter(MemoryChunk* chunk);
incremental-marking.cc
151 DCHECK(MemoryChunk::FromAddress(from->address())->SweepingDone()); in NotifyLeftTrimming()
152 DCHECK_EQ(MemoryChunk::FromAddress(from->address()), in NotifyLeftTrimming()
153 MemoryChunk::FromAddress(to->address())); in NotifyLeftTrimming()
730 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); in IsFixedArrayWithProgressBar()
731 return chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR); in IsFixedArrayWithProgressBar()
scavenger.cc
109 void Scavenger::AddPageToSweeperIfNecessary(MemoryChunk* page) { in AddPageToSweeperIfNecessary()
118 void Scavenger::ScavengePage(MemoryChunk* page) { in ScavengePage()
invalidated-slots.cc
11 InvalidatedSlotsFilter::InvalidatedSlotsFilter(MemoryChunk* chunk) { in InvalidatedSlotsFilter()
store-buffer.cc
162 MemoryChunk* chunk = MemoryChunk::FromAnyPointerAddress(heap_, addr); in MoveEntriesToRememberedSet()
scavenger-inl.h
42 MemoryChunk* chunk = MemoryChunk::FromAddress(heap_object->address()); in PageMemoryFence()
heap.h
601 void UnprotectAndRegisterMemoryChunk(MemoryChunk* chunk);
603 void UnregisterUnprotectedMemoryChunk(MemoryChunk* chunk);
2242 std::unordered_set<MemoryChunk*> unprotected_memory_chunks_;
2376 explicit inline CodePageMemoryModificationScope(MemoryChunk* chunk);
2380 MemoryChunk* chunk_;
/external/vulkan-validation-layers/layers/
gpu_validation.h
52 struct MemoryChunk {
63 std::list<MemoryChunk> chunk_list_;
66 VkResult AllocMemoryChunk(MemoryChunk &chunk);
67 void FreeMemoryChunk(MemoryChunk &chunk);
gpu_validation.cpp
62 MemoryChunk *pChunk = nullptr; in GetBlock()
72 MemoryChunk new_chunk; in GetBlock()
104 [&block](const MemoryChunk &c) { return c.buffer == block.buffer; }); in PutBackBlock()
142 VkResult GpuDeviceMemoryManager::AllocMemoryChunk(MemoryChunk &chunk) { in AllocMemoryChunk()
200 void GpuDeviceMemoryManager::FreeMemoryChunk(MemoryChunk &chunk) { in FreeMemoryChunk()
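
The validation layer's manager is a simple pool: GetBlock hands out fixed-size blocks and allocates a fresh MemoryChunk (one VkBuffer plus its backing VkDeviceMemory) when the free list runs dry. A hedged sketch of the chunk allocation step; the usage bits and memory-type selection are assumptions, not the layer's actual choices:

    #include <vulkan/vulkan.h>

    struct MemoryChunk {
      VkBuffer buffer = VK_NULL_HANDLE;
      VkDeviceMemory memory = VK_NULL_HANDLE;
    };

    VkResult AllocMemoryChunk(VkDevice device, uint32_t memory_type_index,
                              VkDeviceSize chunk_size, MemoryChunk& chunk) {
      VkBufferCreateInfo buffer_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
      buffer_info.size = chunk_size;
      buffer_info.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;  // assumed usage
      buffer_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
      VkResult result =
          vkCreateBuffer(device, &buffer_info, nullptr, &chunk.buffer);
      if (result != VK_SUCCESS) return result;

      VkMemoryRequirements reqs;
      vkGetBufferMemoryRequirements(device, chunk.buffer, &reqs);

      VkMemoryAllocateInfo alloc_info = {VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO};
      alloc_info.allocationSize = reqs.size;
      alloc_info.memoryTypeIndex = memory_type_index;  // assumed host-visible
      result = vkAllocateMemory(device, &alloc_info, nullptr, &chunk.memory);
      if (result != VK_SUCCESS) {
        vkDestroyBuffer(device, chunk.buffer, nullptr);
        chunk.buffer = VK_NULL_HANDLE;
        return result;
      }

      return vkBindBufferMemory(device, chunk.buffer, chunk.memory, 0);
    }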
/external/v8/src/
isolate-inl.h
15 i::MemoryChunk* chunk = i::MemoryChunk::FromHeapObject(obj); in FromWritableHeapObject()
/external/v8/src/builtins/
builtins-internal-gen.cc
227 IntPtrConstant(MemoryChunk::kFlagsOffset)); in IsPageFlagSet()
256 IntPtrConstant(MemoryChunk::kHeaderSize)); in GetMarkBit()
385 IsPageFlagSet(value, MemoryChunk::kIsInNewSpaceMask); in TF_BUILTIN()
389 IsPageFlagSet(object, MemoryChunk::kIsInNewSpaceMask); in TF_BUILTIN()
412 GotoIfNot(IsPageFlagSet(value, MemoryChunk::kEvacuationCandidateMask), in TF_BUILTIN()
415 IsPageFlagSet(object, MemoryChunk::kSkipEvacuationSlotsRecordingMask), in TF_BUILTIN()
/external/v8/src/ia32/
macro-assembler-ia32.cc
384 MemoryChunk::kPointersToHereAreInterestingMask, zero, &done, in RecordWrite()
388 MemoryChunk::kPointersFromHereAreInterestingMask, in RecordWrite()
1841 test_b(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask)); in CheckPageFlag()
1843 test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask)); in CheckPageFlag()
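
Both the builtin (IsPageFlagSet) and the ia32 macro-assembler (CheckPageFlag) emit the same three steps: mask the pointer down to its page, load the flags word at kFlagsOffset, test the mask. The equivalent in plain C++; both constants here are assumed, not V8's real layout:

    #include <cstdint>

    constexpr uintptr_t kPageAlignmentMask = (512u * 1024) - 1;  // assumed
    constexpr uintptr_t kFlagsOffset = 2 * sizeof(void*);        // assumed

    bool CheckPageFlag(uintptr_t object, uintptr_t mask) {
      uintptr_t page = object & ~kPageAlignmentMask;
      uintptr_t flags = *reinterpret_cast<uintptr_t*>(page + kFlagsOffset);
      return (flags & mask) != 0;
    }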
