Lines Matching refs:RoundUp
140 Address aligned_base = RoundUp(base, MemoryChunk::kAlignment); in SetUp()
263 size_t aligned_requested = RoundUp(requested_size, MemoryChunk::kAlignment); in ReserveBlock()
299 capacity_ = RoundUp(capacity, Page::kPageSize); in SetUp()
300 capacity_executable_ = RoundUp(capacity_executable, Page::kPageSize); in SetUp()
453 RoundUp(static_cast<Address>(reservation.address()), alignment); in ReserveAlignedMemory()
551 RoundUp(header_size + requested, MemoryAllocator::GetCommitPageSize()); in CommitArea()
552 size_t committed_size = RoundUp(header_size + (area_end() - area_start()), in CommitArea()
680 chunk_size = RoundUp(CodePageAreaStartOffset() + reserve_area_size, in AllocateChunk()
692 size_t commit_size = RoundUp(CodePageGuardStartOffset() + commit_area_size, in AllocateChunk()
728 chunk_size = RoundUp(MemoryChunk::kObjectStartOffset + reserve_area_size, in AllocateChunk()
731 RoundUp(MemoryChunk::kObjectStartOffset + commit_area_size, in AllocateChunk()
1015 return RoundUp(Page::kObjectStartOffset, GetCommitPageSize()); in CodePageGuardStartOffset()
1562 size_t rounded_new_capacity = RoundUp(new_capacity, Page::kPageSize); in AllocateChunk()
2904 size_t used_size = RoundUp((object->address() - address()) + object->Size(), in AllocateChunk()
3086 uintptr_t start = RoundUp(reinterpret_cast<uintptr_t>(free_start), in AllocateChunk()
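Every hit above is the same idiom: a size or address is rounded up to an alignment boundary (MemoryChunk::kAlignment, Page::kPageSize, or the OS commit page size) before memory is reserved, committed, or measured. As a rough, self-contained sketch of what such a RoundUp helper does, assuming a power-of-two alignment (illustrative only, not V8's exact implementation):

#include <cassert>
#include <cstdint>

// Round value up to the next multiple of alignment.
// Assumes alignment is a non-zero power of two, so a bit mask can be
// used instead of a division. Names and constants below are stand-ins,
// not V8's real helpers.
inline uintptr_t RoundUp(uintptr_t value, uintptr_t alignment) {
  assert(alignment != 0 && (alignment & (alignment - 1)) == 0);
  return (value + alignment - 1) & ~(alignment - 1);
}

int main() {
  const uintptr_t kPageSize = 4096;          // stand-in for Page::kPageSize
  assert(RoundUp(5000, kPageSize) == 8192);  // partial page rounds to a whole page
  assert(RoundUp(8192, kPageSize) == 8192);  // already aligned values are unchanged
  assert(RoundUp(0, kPageSize) == 0);
  return 0;
}

Rounding up rather than down guarantees the reservation or commit covers at least the requested bytes, at the cost of wasting up to one alignment unit minus one byte.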