Searched refs:kRegionSize (Results 1 – 6 of 6) sorted by relevance
/art/runtime/gc/accounting/
D | read_barrier_table.h |
    36  size_t capacity = static_cast<size_t>(kHeapCapacity / kRegionSize);  in ReadBarrierTable()
    37  DCHECK_EQ(kHeapCapacity / kRegionSize,  in ReadBarrierTable()
    38      static_cast<uint64_t>(static_cast<size_t>(kHeapCapacity / kRegionSize)));  in ReadBarrierTable()
    54  DCHECK_ALIGNED(start_addr, kRegionSize);  in Clear()
    55  DCHECK_ALIGNED(end_addr, kRegionSize);  in Clear()
    83  static constexpr size_t kRegionSize = 1 * MB;  variable
    91  uint8_t* entry_addr = mem_map_->Begin() + reinterpret_cast<uintptr_t>(heap_addr) / kRegionSize;  in EntryFromAddr()
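The pattern in these hits: the read barrier table keeps one byte of state per kRegionSize-sized slice of the heap, so mapping a heap address to its table entry is a single division (line 91). A minimal sketch of that idea, assuming a vector-backed table, a made-up kHeapCapacity, and treating heap_addr as an offset into the heap; the real class sits on a MemMap and divides the raw mapped address:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    constexpr size_t MB = 1024 * 1024;
    constexpr size_t kRegionSize = 1 * MB;                 // mirrors the definition at line 83
    constexpr uint64_t kHeapCapacity = 4ULL * 1024 * MB;   // assumed capacity for the sketch

    class ReadBarrierTableSketch {
     public:
      // One entry per region, as in the capacity computation at line 36.
      ReadBarrierTableSketch()
          : entries_(static_cast<size_t>(kHeapCapacity / kRegionSize), 0) {}

      // Region-granularity lookup: divide the address by the region size.
      uint8_t* EntryFromAddr(uintptr_t heap_addr) {
        size_t index = heap_addr / kRegionSize;
        assert(index < entries_.size());
        return &entries_[index];
      }

     private:
      std::vector<uint8_t> entries_;
    };

    int main() {
      ReadBarrierTableSketch table;
      *table.EntryFromAddr(3 * kRegionSize + 123) = 1;     // mark region 3
      assert(*table.EntryFromAddr(3 * kRegionSize) == 1);  // any address in region 3 hits it
      return 0;
    }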
/art/runtime/gc/space/ |
D | region_space.cc |
    33   capacity = RoundUp(capacity, kRegionSize);  in Create()
    52   CHECK_ALIGNED(mem_map_size, kRegionSize);  in RegionSpace()
    53   CHECK_ALIGNED(mem_map->Begin(), kRegionSize);  in RegionSpace()
    54   num_regions_ = mem_map_size / kRegionSize;  in RegionSpace()
    59   for (size_t i = 0; i < num_regions_; ++i, region_addr += kRegionSize) {  in RegionSpace()
    60   regions_[i] = Region(i, region_addr, region_addr + kRegionSize);  in RegionSpace()
    66   CHECK_EQ(static_cast<size_t>(regions_[i].End() - regions_[i].Begin()), kRegionSize);  in RegionSpace()
    91   return num_regions * kRegionSize;  in FromSpaceSize()
    103  return num_regions * kRegionSize;  in UnevacFromSpaceSize()
    115  return num_regions * kRegionSize;  in ToSpaceSize()
    [all …]
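The constructor hits above show the space being carved up: the capacity is rounded to a whole number of regions, then the mapping is sliced into [addr, addr + kRegionSize) pieces. A hedged sketch of that arithmetic, with a plain struct and a heap-allocated buffer standing in for the real Region class and MemMap:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    constexpr size_t MB = 1024 * 1024;
    constexpr size_t kRegionSize = 1 * MB;

    constexpr size_t RoundUp(size_t x, size_t n) { return ((x + n - 1) / n) * n; }

    struct Region {
      size_t idx;
      uint8_t* begin;
      uint8_t* end;
    };

    int main() {
      size_t capacity = RoundUp(5 * MB + 1, kRegionSize);  // as in Create(), line 33
      std::vector<uint8_t> mapping(capacity);              // stand-in for the MemMap
      assert(capacity % kRegionSize == 0);                 // alignment checks, lines 52-53

      size_t num_regions = capacity / kRegionSize;         // line 54
      std::vector<Region> regions;
      uint8_t* addr = mapping.data();
      for (size_t i = 0; i < num_regions; ++i, addr += kRegionSize) {  // lines 59-60
        regions.push_back(Region{i, addr, addr + kRegionSize});
      }
      for (const Region& r : regions) {
        // Every region is exactly kRegionSize bytes, as checked at line 66.
        assert(static_cast<size_t>(r.end - r.begin) == kRegionSize);
      }
      return 0;
    }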
D | region_space-inl.h |
    48   if (LIKELY(num_bytes <= kRegionSize)) {  in AllocNonvirtual()
    144  if (LIKELY(num_bytes <= kRegionSize)) {  in AllocationSizeNonvirtual()
    149  *usable_size = RoundUp(num_bytes, kRegionSize);  in AllocationSizeNonvirtual()
    269  DCHECK_GT(num_bytes, kRegionSize);  in AllocLarge()
    270  size_t num_regs = RoundUp(num_bytes, kRegionSize) / kRegionSize;  in AllocLarge()
    272  DCHECK_LT((num_regs - 1) * kRegionSize, num_bytes);  in AllocLarge()
    273  DCHECK_LE(num_bytes, num_regs * kRegionSize);  in AllocLarge()
    312  *usable_size = num_regs * kRegionSize;  in AllocLarge()
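These hits show the size split: requests at or below kRegionSize take the per-region fast path (line 48), while anything larger is rounded up to a whole number of contiguous regions in AllocLarge. A sketch of the AllocLarge bookkeeping only; the function name and asserts here are mine:

    #include <cassert>
    #include <cstddef>

    constexpr size_t MB = 1024 * 1024;
    constexpr size_t kRegionSize = 1 * MB;

    constexpr size_t RoundUp(size_t x, size_t n) { return ((x + n - 1) / n) * n; }

    // Mirrors the region count and sanity checks at lines 269-273.
    size_t RegionsForLargeAlloc(size_t num_bytes) {
      assert(num_bytes > kRegionSize);                   // large path only (line 269)
      size_t num_regs = RoundUp(num_bytes, kRegionSize) / kRegionSize;  // line 270
      assert((num_regs - 1) * kRegionSize < num_bytes);  // no spare whole region (line 272)
      assert(num_bytes <= num_regs * kRegionSize);       // enough room (line 273)
      return num_regs;
    }

    int main() {
      assert(RegionsForLargeAlloc(kRegionSize + 1) == 2);
      assert(RegionsForLargeAlloc(3 * MB) == 3);
      // The usable size reported back is num_regs * kRegionSize, as at line 312.
      return 0;
    }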
D | region_space.h |
    171  static constexpr size_t kRegionSize = 1 * MB;  variable
    248  DCHECK_EQ(static_cast<size_t>(end - begin), kRegionSize);  in Region()
    394  size_t bytes_allocated = RoundUp(BytesAllocated(), kRegionSize);  in GetLivePercent()
    403  DCHECK_LT(begin_ + kRegionSize, top_);  in BytesAllocated()
    412  DCHECK_LE(bytes, kRegionSize);  in BytesAllocated()
    500  size_t reg_idx = offset / kRegionSize;  in RefToRegionLocked()
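Because regions are contiguous and each is exactly kRegionSize bytes (the definition at line 171, checked at line 248), RefToRegionLocked can map any heap pointer to its region with one division, as line 500 shows. A tiny sketch of that lookup, with a hypothetical RefToRegionIdx helper:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    constexpr size_t MB = 1024 * 1024;
    constexpr size_t kRegionSize = 1 * MB;

    // Region index of a reference inside the space; line 500's computation.
    size_t RefToRegionIdx(uintptr_t ref, uintptr_t space_begin) {
      size_t offset = ref - space_begin;
      return offset / kRegionSize;
    }

    int main() {
      uintptr_t begin = 0x70000000;  // arbitrary space base for the sketch
      assert(RefToRegionIdx(begin, begin) == 0);
      assert(RefToRegionIdx(begin + 2 * kRegionSize + 17, begin) == 2);
      return 0;
    }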
/art/runtime/gc/ |
D | heap-inl.h |
    347  if (space::RegionSpace::kRegionSize >= alloc_size) {  in TryToAllocate()
    349  … if (LIKELY(!IsOutOfMemoryOnAllocation<kGrow>(allocator_type, space::RegionSpace::kRegionSize))) {  in TryToAllocate()
    357  *bytes_tl_bulk_allocated = space::RegionSpace::kRegionSize;  in TryToAllocate()
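In TryToAllocate, an allocation that fits in one region (line 347) is served from a thread-local buffer, and refilling that buffer reserves a full region at once, which is why bytes_tl_bulk_allocated is set to kRegionSize (line 357). A simplified sketch assuming a bare-bones Tlab struct and omitting the out-of-memory check at line 349:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    constexpr size_t MB = 1024 * 1024;
    constexpr size_t kRegionSize = 1 * MB;

    struct Tlab {
      uint8_t* pos = nullptr;
      uint8_t* end = nullptr;
    };

    uint8_t* TryToAllocate(Tlab& tlab, std::vector<uint8_t>& region,
                           size_t alloc_size, size_t* bytes_tl_bulk_allocated) {
      if (kRegionSize >= alloc_size) {  // fits in one region, as at line 347
        if (tlab.pos == nullptr || tlab.pos + alloc_size > tlab.end) {
          // Refill: reserve a whole region as the thread-local buffer.
          tlab.pos = region.data();
          tlab.end = region.data() + kRegionSize;
          *bytes_tl_bulk_allocated = kRegionSize;  // bulk charge, as at line 357
        }
        uint8_t* obj = tlab.pos;  // bump-pointer allocation within the buffer
        tlab.pos += alloc_size;
        return obj;
      }
      return nullptr;  // larger requests take a different path
    }

    int main() {
      std::vector<uint8_t> region(kRegionSize);
      Tlab tlab;
      size_t bulk = 0;
      uint8_t* a = TryToAllocate(tlab, region, 64, &bulk);
      uint8_t* b = TryToAllocate(tlab, region, 64, &bulk);
      assert(a != nullptr && b == a + 64 && bulk == kRegionSize);
      return 0;
    }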
/art/runtime/gc/collector/ |
D | concurrent_copying.cc |
    58    static_assert(space::RegionSpace::kRegionSize == accounting::ReadBarrierTable::kRegionSize,  in ConcurrentCopying()
    1852  if (bytes_allocated > space::RegionSpace::kRegionSize) {  in Copy()
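The static_assert at line 58 pins the two independently declared constants together, so the collector's read barrier table and the region space can never silently disagree on granularity. A miniature reproduction of that coupling; the class bodies are reduced to just the constants:

    #include <cstddef>

    constexpr size_t MB = 1024 * 1024;

    namespace space {
    struct RegionSpace {
      static constexpr size_t kRegionSize = 1 * MB;  // region_space.h:171
    };
    }  // namespace space

    namespace accounting {
    struct ReadBarrierTable {
      static constexpr size_t kRegionSize = 1 * MB;  // read_barrier_table.h:83
    };
    }  // namespace accounting

    // Compilation fails if the two constants ever diverge.
    static_assert(space::RegionSpace::kRegionSize == accounting::ReadBarrierTable::kRegionSize,
                  "RegionSpace and ReadBarrierTable must use the same region size");

    int main() { return 0; }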