Lines Matching refs:Page

79 DCHECK((OffsetFrom(address) & Page::kPageAlignmentMask) == 0)
85 DCHECK((0 < size) && (size <= Page::kMaxRegularHeapObjectSize))
88 DCHECK((Page::kObjectStartOffset <= offset) && (offset <= Page::kPageSize))
703 class Page : public MemoryChunk {
709 INLINE(static Page* FromAddress(Address a)) { in FromAddress()
710 return reinterpret_cast<Page*>(OffsetFrom(a) & ~kPageAlignmentMask); in FromAddress()
717 INLINE(static Page* FromAllocationTop(Address top)) { in FromAllocationTop()
718 Page* p = FromAddress(top - kPointerSize); in FromAllocationTop()
723 inline Page* next_page();
724 inline Page* prev_page();
725 inline void set_next_page(Page* page);
726 inline void set_prev_page(Page* page);
761 static inline Page* Initialize(Heap* heap, MemoryChunk* chunk,
793 STATIC_ASSERT(sizeof(Page) <= MemoryChunk::kHeaderSize);
910 DCHECK(size >= static_cast<size_t>(Page::kPageSize)); in FreeBlock()
915 DCHECK(size >= static_cast<size_t>(Page::kPageSize)); in FreeBlock()
965 return (OffsetFrom(addr) & Page::kPageAlignmentMask) >> kRegionSizeLog2; in RegionNumber()
969 Page* page = Page::FromAddress(addr); in Update()
982 static const int kSize = Page::kPageSize / kRegionSize;
984 STATIC_ASSERT(Page::kPageSize % kRegionSize == 0);
1007 Page* AllocatePage(intptr_t size, PagedSpace* owner,
1032 return (Available() / Page::kPageSize) * Page::kMaxRegularHeapObjectSize; in MaxAvailable()
1145 Page* InitializePagesInChunk(int chunk_id, int pages_in_chunk,
1189 HeapObjectIterator(Page* page, HeapObjectCallback size_func);
1234 inline Page* next();
1238 Page* prev_page_; // Previous page returned.
1241 Page* next_page_;
1286 return (Page::FromAllocationTop(top_) == Page::FromAllocationTop(limit_)) && in VerifyPagedAllocation()
1447 intptr_t EvictFreeListItemsInList(Page* p);
1448 bool ContainsPageFreeListItemsInList(Page* p);
1569 intptr_t EvictFreeListItems(Page* p);
1570 bool ContainsPageFreeListItems(Page* p);
1580 static const int kMaxBlockSize = Page::kMaxRegularHeapObjectSize;
1707 void ObtainFreeListStatistics(Page* p, SizeStats* sizes);
1778 Page::FromAddress(top) == Page::FromAddress(limit - 1)); in SetTopAndLimit()
1798 void ReleasePage(Page* page);
1801 Page* anchor() { return &anchor_; } in anchor()
1827 static bool ShouldBeSweptBySweeperThreads(Page* p) { in ShouldBeSweptBySweeperThreads()
1829 !p->IsFlagSet(Page::RESCAN_ON_EVACUATION) && !p->WasSwept(); in ShouldBeSweptBySweeperThreads()
1834 void IncreaseUnsweptFreeBytes(Page* p) { in IncreaseUnsweptFreeBytes()
1841 void DecreaseUnsweptFreeBytes(Page* p) { in DecreaseUnsweptFreeBytes()
1854 void set_end_of_unswept_pages(Page* page) { end_of_unswept_pages_ = page; } in set_end_of_unswept_pages()
1856 Page* end_of_unswept_pages() { return end_of_unswept_pages_; } in end_of_unswept_pages()
1858 Page* FirstPage() { return anchor_.next_page(); } in FirstPage()
1859 Page* LastPage() { return anchor_.prev_page(); } in LastPage()
1891 Page anchor_;
1906 Page* end_of_unswept_pages_;
1987 static const int kAreaSize = Page::kMaxRegularHeapObjectSize;
2006 return (reinterpret_cast<intptr_t>(addr) & Page::kPageAlignmentMask) == in IsAtStart()
2011 return (reinterpret_cast<intptr_t>(addr) & Page::kPageAlignmentMask) == 0; in IsAtEnd()
2020 ~Page::kPageAlignmentMask); in FromAddress()
2369 return (to_space_.TotalCapacity() / Page::kPageSize) * in Capacity()
2639 static const int kMapsPerPage = Page::kMaxRegularHeapObjectSize / Map::kSize;
2722 if (chunk_size <= (Page::kPageSize + Page::kObjectStartOffset)) return 0; in ObjectSizeFor()
2723 return chunk_size - Page::kPageSize - Page::kObjectStartOffset; in ObjectSizeFor()
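
The Page helpers that dominate these matches (FromAddress at 709-710, FromAllocationTop at 717-718, and the NewSpacePage checks at 2006-2020) all rest on one invariant: pages are power-of-two sized and aligned, so masking the low bits of any interior address yields the page start. Below is a minimal standalone sketch of that arithmetic, assuming the 1 MB page size this header uses; the names and types are illustrative stand-ins, not V8's actual declarations.

#include <cassert>
#include <cstdint>
#include <cstdio>

constexpr uintptr_t kPageSize = 1 << 20;                 // Page::kPageSize (1 MB here)
constexpr uintptr_t kPageAlignmentMask = kPageSize - 1;  // low 20 bits

// Page::FromAddress: clear the low bits to find the containing page.
uintptr_t PageFromAddress(uintptr_t a) { return a & ~kPageAlignmentMask; }

// Page::FromAllocationTop: an allocation top may sit exactly on the next
// page boundary, so step back one word before masking to land on the page
// that was actually being filled.
uintptr_t PageFromAllocationTop(uintptr_t top) {
  return PageFromAddress(top - sizeof(void*));
}

int main() {
  uintptr_t page = 5 * kPageSize;  // hypothetical page start
  uintptr_t addr = page + 0x1234;  // an object inside that page

  assert(PageFromAddress(addr) == page);
  // A top exactly on the page boundary still belongs to the page below it.
  assert(PageFromAllocationTop(page + kPageSize) == page);
  // NewSpacePage::IsAtStart-style check: offset within the page is zero.
  assert((page & kPageAlignmentMask) == 0);

  std::printf("page of %#lx -> %#lx\n", (unsigned long)addr,
              (unsigned long)PageFromAddress(addr));
  return 0;
}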
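
The size arithmetic in the later matches (MaxAvailable at 1032, Capacity at 2369, ObjectSizeFor at 2722-2723) follows from the same page layout: each page reserves kObjectStartOffset bytes of header, leaving kMaxRegularHeapObjectSize usable bytes. A sketch of that bookkeeping, with an assumed header size (the real kObjectStartOffset is derived from MemoryChunk::kHeaderSize and alignment, not the placeholder below):

#include <cstdint>
#include <cstdio>

constexpr intptr_t kPageSize = 1 << 20;
constexpr intptr_t kObjectStartOffset = 2 * 1024;  // assumed header size
constexpr intptr_t kMaxRegularHeapObjectSize = kPageSize - kObjectStartOffset;

// LargeObjectSpace::ObjectSizeFor (2722-2723): a chunk must be strictly
// larger than one page plus the header before any large-object payload fits.
intptr_t ObjectSizeFor(intptr_t chunk_size) {
  if (chunk_size <= kPageSize + kObjectStartOffset) return 0;
  return chunk_size - kPageSize - kObjectStartOffset;
}

int main() {
  // MaxAvailable-style estimate (1032): whole pages times usable bytes/page.
  intptr_t available = 8 * kPageSize;
  intptr_t max_available = (available / kPageSize) * kMaxRegularHeapObjectSize;
  std::printf("max regular bytes in 8 pages: %lld\n",
              (long long)max_available);
  std::printf("ObjectSizeFor(3 MB chunk): %lld\n",
              (long long)ObjectSizeFor(3 * kPageSize));
  return 0;
}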