Searched refs:kRegionSize (Results 1 – 6 of 6) sorted by relevance
/art/runtime/gc/accounting/
read_barrier_table.h
      36  size_t capacity = static_cast<size_t>(kHeapCapacity / kRegionSize);    in ReadBarrierTable()
      37  DCHECK_EQ(kHeapCapacity / kRegionSize,    in ReadBarrierTable()
      38      static_cast<uint64_t>(static_cast<size_t>(kHeapCapacity / kRegionSize)));    in ReadBarrierTable()
      54  DCHECK(IsAligned<kRegionSize>(start_addr));    in Clear()
      55  DCHECK(IsAligned<kRegionSize>(end_addr));    in Clear()
      83  static constexpr size_t kRegionSize = 1 * MB;    variable
      91  uint8_t* entry_addr = mem_map_->Begin() + reinterpret_cast<uintptr_t>(heap_addr) / kRegionSize;    in EntryFromAddr()
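The table is sized with one byte-sized entry per 1 MB region, and a heap address maps to its entry by integer division by kRegionSize. A minimal standalone sketch of that indexing, assuming a hypothetical 4 GB kHeapCapacity (the real constant is defined elsewhere in ART) and a plain vector standing in for the mem_map_:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    constexpr size_t MB = 1024 * 1024;
    constexpr size_t kRegionSize = 1 * MB;
    constexpr uint64_t kHeapCapacity = 4ULL * 1024 * MB;  // assumed 4 GB for this sketch

    int main() {
      // Same sizing as ReadBarrierTable(): one table entry per region,
      // with a check that the division fits in size_t.
      size_t capacity = static_cast<size_t>(kHeapCapacity / kRegionSize);
      assert(kHeapCapacity / kRegionSize ==
             static_cast<uint64_t>(static_cast<size_t>(kHeapCapacity / kRegionSize)));
      std::vector<uint8_t> table(capacity, 0);

      // Same indexing as EntryFromAddr(): a heap offset maps to
      // table[offset / kRegionSize].
      uintptr_t heap_offset = 3 * kRegionSize + 12345;  // some address inside region 3
      table[heap_offset / kRegionSize] = 1;             // mark region 3
      assert(table[3] == 1);
      return 0;
    }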
/art/runtime/gc/space/
region_space.cc
      33  capacity = RoundUp(capacity, kRegionSize);    in Create()
      52  CHECK_ALIGNED(mem_map_size, kRegionSize);    in RegionSpace()
      53  CHECK_ALIGNED(mem_map->Begin(), kRegionSize);    in RegionSpace()
      54  num_regions_ = mem_map_size / kRegionSize;    in RegionSpace()
      59  for (size_t i = 0; i < num_regions_; ++i, region_addr += kRegionSize) {    in RegionSpace()
      60  regions_[i] = Region(i, region_addr, region_addr + kRegionSize);    in RegionSpace()
      66  CHECK_EQ(static_cast<size_t>(regions_[i].End() - regions_[i].Begin()), kRegionSize);    in RegionSpace()
      91  return num_regions * kRegionSize;    in FromSpaceSize()
     103  return num_regions * kRegionSize;    in UnevacFromSpaceSize()
     115  return num_regions * kRegionSize;    in ToSpaceSize()
     [all …]
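The constructor rounds the requested capacity up to a whole number of regions and carves the mapping into kRegionSize chunks, one Region record each. A compilable sketch of the same arithmetic, with a plain offset range standing in for the real MemMap and a RoundUp helper assumed to match ART's round-up-to-multiple semantics:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    constexpr size_t kRegionSize = 1 * 1024 * 1024;

    constexpr size_t RoundUp(size_t x, size_t n) { return ((x + n - 1) / n) * n; }

    struct Region {
      size_t idx;
      uintptr_t begin;
      uintptr_t end;
    };

    int main() {
      // Create(): round the requested capacity to a multiple of kRegionSize.
      size_t capacity = RoundUp(5'000'000, kRegionSize);
      assert(capacity % kRegionSize == 0);  // the CHECK_ALIGNED from RegionSpace()

      // RegionSpace(): carve the mapping into fixed-size regions.
      size_t num_regions = capacity / kRegionSize;
      std::vector<Region> regions;
      uintptr_t region_addr = 0;  // assume a region-aligned base address
      for (size_t i = 0; i < num_regions; ++i, region_addr += kRegionSize) {
        regions.push_back({i, region_addr, region_addr + kRegionSize});
      }
      for (const Region& r : regions) {
        assert(r.end - r.begin == kRegionSize);  // the CHECK_EQ from RegionSpace()
      }
      return 0;
    }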
region_space-inl.h
      48  if (LIKELY(num_bytes <= kRegionSize)) {    in AllocNonvirtual()
     145  if (LIKELY(num_bytes <= kRegionSize)) {    in AllocationSizeNonvirtual()
     150  *usable_size = RoundUp(num_bytes, kRegionSize);    in AllocationSizeNonvirtual()
     270  DCHECK_GT(num_bytes, kRegionSize);    in AllocLarge()
     271  size_t num_regs = RoundUp(num_bytes, kRegionSize) / kRegionSize;    in AllocLarge()
     273  DCHECK_LT((num_regs - 1) * kRegionSize, num_bytes);    in AllocLarge()
     274  DCHECK_LE(num_bytes, num_regs * kRegionSize);    in AllocLarge()
     313  *usable_size = num_regs * kRegionSize;    in AllocLarge()
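AllocLarge() sizes a multi-region allocation as the smallest whole number of contiguous regions that covers num_bytes; the two DCHECKs pin down exactly that minimality. A sketch of just the sizing arithmetic (RegionsForLargeAlloc is an illustrative name, not an ART function):

    #include <cassert>
    #include <cstddef>

    constexpr size_t kRegionSize = 1 * 1024 * 1024;
    constexpr size_t RoundUp(size_t x, size_t n) { return ((x + n - 1) / n) * n; }

    size_t RegionsForLargeAlloc(size_t num_bytes, size_t* usable_size) {
      assert(num_bytes > kRegionSize);  // AllocLarge() is only for large requests
      size_t num_regs = RoundUp(num_bytes, kRegionSize) / kRegionSize;
      // The invariants checked in the source: num_regs regions are sufficient,
      // and one fewer region would not be.
      assert((num_regs - 1) * kRegionSize < num_bytes);
      assert(num_bytes <= num_regs * kRegionSize);
      *usable_size = num_regs * kRegionSize;  // whole span of the claimed regions
      return num_regs;
    }

    int main() {
      size_t usable = 0;
      size_t regs = RegionsForLargeAlloc(3 * kRegionSize + 1, &usable);
      assert(regs == 4 && usable == 4 * kRegionSize);
      return 0;
    }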
region_space.h
     169  static constexpr size_t kRegionSize = 1 * MB;    variable
     246  DCHECK_EQ(static_cast<size_t>(end - begin), kRegionSize);    in Region()
     392  size_t bytes_allocated = RoundUp(BytesAllocated(), kRegionSize);    in GetLivePercent()
     401  DCHECK_LT(begin_ + kRegionSize, top_);    in BytesAllocated()
     410  DCHECK_LE(bytes, kRegionSize);    in BytesAllocated()
     498  size_t reg_idx = offset / kRegionSize;    in RefToRegionLocked()
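RefToRegionLocked() recovers a region index from an object reference by dividing its offset from the space base by kRegionSize, the inverse of the carving done in the constructor. A small sketch of that lookup (RegionIndexFor is an illustrative name):

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    constexpr size_t kRegionSize = 1 * 1024 * 1024;

    // Same arithmetic as RefToRegionLocked(): offset within the space,
    // divided by the fixed region size, yields the owning region's index.
    size_t RegionIndexFor(uintptr_t space_begin, uintptr_t ref) {
      uintptr_t offset = ref - space_begin;
      return offset / kRegionSize;
    }

    int main() {
      uintptr_t begin = 0x10000000;
      assert(RegionIndexFor(begin, begin) == 0);
      assert(RegionIndexFor(begin, begin + 2 * kRegionSize + 7) == 2);
      return 0;
    }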
/art/runtime/gc/
heap-inl.h
     330  if (space::RegionSpace::kRegionSize >= alloc_size) {    in TryToAllocate()
     332  … if (LIKELY(!IsOutOfMemoryOnAllocation<kGrow>(allocator_type, space::RegionSpace::kRegionSize))) {    in TryToAllocate()
     340  *bytes_tl_bulk_allocated = space::RegionSpace::kRegionSize;    in TryToAllocate()
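This is the allocator's region-TLAB path: a request no larger than one region claims an entire kRegionSize region as the thread-local buffer, and the whole region is reported as bytes_tl_bulk_allocated. A simplified sketch; the kMaxHeap budget and the inline capacity test are stand-ins for IsOutOfMemoryOnAllocation<kGrow>(), which is not reproduced here:

    #include <cstddef>

    constexpr size_t kRegionSize = 1 * 1024 * 1024;
    constexpr size_t kMaxHeap = 64 * kRegionSize;  // assumed budget for this sketch
    size_t g_heap_used = 0;

    bool TryToAllocateRegionTLAB(size_t alloc_size, size_t* bytes_tl_bulk_allocated) {
      if (kRegionSize >= alloc_size) {
        // Stand-in for the out-of-memory check on a full region's worth of bytes.
        if (g_heap_used + kRegionSize <= kMaxHeap) {
          g_heap_used += kRegionSize;
          *bytes_tl_bulk_allocated = kRegionSize;  // whole region handed to the TLAB
          return true;
        }
      }
      return false;  // fall back to the non-TLAB (or large-object) path
    }

    int main() {
      size_t tl_bytes = 0;
      bool ok = TryToAllocateRegionTLAB(512, &tl_bytes);
      return (ok && tl_bytes == kRegionSize) ? 0 : 1;
    }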
/art/runtime/gc/collector/
concurrent_copying.cc
      46  static_assert(space::RegionSpace::kRegionSize == accounting::ReadBarrierTable::kRegionSize,    in ConcurrentCopying()
    1350  if (bytes_allocated > space::RegionSpace::kRegionSize) {    in Copy()
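The collector pins the two kRegionSize constants together at compile time, since the read barrier table's one-entry-per-region layout only works if both sides agree on the region size. A self-contained sketch of that check (the namespaces and constants are mocked here, not ART's real headers):

    #include <cstddef>

    // Mock stand-ins for the two ART headers; values are illustrative.
    namespace space {
    struct RegionSpace { static constexpr size_t kRegionSize = 1024 * 1024; };
    }
    namespace accounting {
    struct ReadBarrierTable { static constexpr size_t kRegionSize = 1024 * 1024; };
    }

    // The compile-time agreement check from ConcurrentCopying().
    static_assert(space::RegionSpace::kRegionSize == accounting::ReadBarrierTable::kRegionSize,
                  "region space and read barrier table must agree on the region size");

    // Copy() classifies anything larger than one region as a non-regular copy.
    bool IsLargeCopy(size_t bytes_allocated) {
      return bytes_allocated > space::RegionSpace::kRegionSize;
    }

    int main() { return IsLargeCopy(2 * 1024 * 1024) ? 0 : 1; }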