
Searched refs:kAlignment (Results 1 – 20 of 20) sorted by relevance

/art/runtime/gc/accounting/
space_bitmap.cc
31 template<size_t kAlignment>
32 size_t SpaceBitmap<kAlignment>::ComputeBitmapSize(uint64_t capacity) { in ComputeBitmapSize()
33 const uint64_t kBytesCoveredPerWord = kAlignment * kBitsPerIntPtrT; in ComputeBitmapSize()
37 template<size_t kAlignment>
38 size_t SpaceBitmap<kAlignment>::ComputeHeapSize(uint64_t bitmap_bytes) { in ComputeHeapSize()
39 return bitmap_bytes * kBitsPerByte * kAlignment; in ComputeHeapSize()
42 template<size_t kAlignment>
43 SpaceBitmap<kAlignment>* SpaceBitmap<kAlignment>::CreateFromMemMap( in CreateFromMemMap()
51 template<size_t kAlignment>
52 SpaceBitmap<kAlignment>::SpaceBitmap(const std::string& name, MemMap* mem_map, uintptr_t* bitmap_be… in SpaceBitmap()
[all …]
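
A minimal standalone sketch of the size computation quoted above, assuming kAlignment = 8 and a 64-bit build; RoundUp and the constants are reimplemented here for illustration, not taken from the ART headers:

    #include <cstddef>
    #include <cstdint>

    constexpr size_t kAlignment = 8;                      // object alignment
    constexpr size_t kBitsPerIntPtrT = sizeof(intptr_t) * 8;

    constexpr uint64_t RoundUp(uint64_t x, uint64_t n) {  // n is a power of two
      return (x + n - 1) & ~(n - 1);
    }

    // One bit per kAlignment heap bytes, so each bitmap word covers
    // kAlignment * kBitsPerIntPtrT = 8 * 64 = 512 heap bytes.
    constexpr size_t ComputeBitmapSize(uint64_t capacity) {
      const uint64_t kBytesCoveredPerWord = kAlignment * kBitsPerIntPtrT;
      return (RoundUp(capacity, kBytesCoveredPerWord) / kBytesCoveredPerWord) *
             sizeof(intptr_t);
    }

    constexpr uint64_t ComputeHeapSize(uint64_t bitmap_bytes) {
      return bitmap_bytes * 8 /* bits per byte */ * kAlignment;
    }

    // A 64 MiB heap needs 67108864 / 512 = 131072 words = 1 MiB of bitmap,
    // and the two functions round-trip exactly for word-multiple capacities.
    static_assert(ComputeBitmapSize(64ull << 20) == 1ull << 20, "");
    static_assert(ComputeHeapSize(1ull << 20) == 64ull << 20, "");
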
space_bitmap-inl.h
32 template<size_t kAlignment>
33 inline bool SpaceBitmap<kAlignment>::AtomicTestAndSet(const mirror::Object* obj) { in AtomicTestAndSet()
54 template<size_t kAlignment>
55 inline bool SpaceBitmap<kAlignment>::Test(const mirror::Object* obj) const { in Test()
64 template<size_t kAlignment> template<typename Visitor>
65 inline void SpaceBitmap<kAlignment>::VisitMarkedRange(uintptr_t visit_begin, uintptr_t visit_end, in VisitMarkedRange()
69 for (uintptr_t i = visit_begin; i < visit_end; i += kAlignment) { in VisitMarkedRange()
85 const size_t bit_start = (offset_start / kAlignment) % kBitsPerIntPtrT; in VisitMarkedRange()
86 const size_t bit_end = (offset_end / kAlignment) % kBitsPerIntPtrT; in VisitMarkedRange()
111 mirror::Object* obj = reinterpret_cast<mirror::Object*>(ptr_base + shift * kAlignment); in VisitMarkedRange()
[all …]
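
A sketch of the addressing scheme behind Test() and AtomicTestAndSet(): an object's offset from the heap base selects one word and one bit within it. The struct and field names here are illustrative stand-ins, not ART's actual SpaceBitmap layout:

    #include <atomic>
    #include <cstddef>
    #include <cstdint>

    constexpr size_t kAlignment = 8;
    constexpr size_t kBitsPerIntPtrT = sizeof(intptr_t) * 8;

    struct ToyBitmap {                   // hypothetical stand-in for SpaceBitmap
      uintptr_t heap_begin;
      std::atomic<uintptr_t>* words;

      bool TestAndSet(uintptr_t addr) {
        const uintptr_t offset = addr - heap_begin;
        const size_t index = offset / kAlignment / kBitsPerIntPtrT;
        const uintptr_t mask =
            uintptr_t{1} << ((offset / kAlignment) % kBitsPerIntPtrT);
        // fetch_or returns the previous word; the masked bit says whether
        // the object was already marked.
        return (words[index].fetch_or(mask, std::memory_order_relaxed) & mask) != 0;
      }
    };
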
bitmap.cc
78 template<size_t kAlignment>
79 MemoryRangeBitmap<kAlignment>* MemoryRangeBitmap<kAlignment>::Create( in Create()
81 CHECK_ALIGNED(cover_begin, kAlignment); in Create()
82 CHECK_ALIGNED(cover_end, kAlignment); in Create()
83 const size_t num_bits = (cover_end - cover_begin) / kAlignment; in Create()
88 template<size_t kAlignment>
89 MemoryRangeBitmap<kAlignment>* MemoryRangeBitmap<kAlignment>::CreateFromMemMap( in CreateFromMemMap()
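
A sketch of the Create() arithmetic above: one bit per kAlignment bytes across [cover_begin, cover_end), with assert() standing in for CHECK_ALIGNED:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    template <size_t kAlignment>
    size_t NumBitsForRange(uintptr_t cover_begin, uintptr_t cover_end) {
      assert(cover_begin % kAlignment == 0);  // both ends must be aligned
      assert(cover_end % kAlignment == 0);
      return (cover_end - cover_begin) / kAlignment;
    }
    // e.g. NumBitsForRange<4096>(0x10000000, 0x10400000) == 1024 bits
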
space_bitmap.h
41 template<size_t kAlignment>
63 return offset / kAlignment / kBitsPerIntPtrT; in OffsetToIndex()
68 return static_cast<T>(index * kAlignment * kBitsPerIntPtrT); in IndexToOffset()
73 return (static_cast<size_t>(1)) << ((offset / kAlignment) % kBitsPerIntPtrT); in OffsetToMask()
119 for (; visit_begin < visit_end; visit_begin += kAlignment) { in VisitRange()
210 static void WalkInstanceFields(SpaceBitmap<kAlignment>* visited, ObjectCallback* callback,
234 template<size_t kAlignment>
235 std::ostream& operator << (std::ostream& stream, const SpaceBitmap<kAlignment>& bitmap);
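
The three conversions quoted from space_bitmap.h, with one worked value. The numbers assume kAlignment = 8 on a 64-bit build, where each word covers 512 heap bytes:

    #include <cstddef>
    #include <cstdint>

    constexpr size_t kAlignment = 8;
    constexpr size_t kBitsPerIntPtrT = sizeof(intptr_t) * 8;

    constexpr size_t OffsetToIndex(size_t offset) {
      return offset / kAlignment / kBitsPerIntPtrT;
    }
    constexpr uintptr_t IndexToOffset(size_t index) {
      return index * kAlignment * kBitsPerIntPtrT;
    }
    constexpr uintptr_t OffsetToMask(uintptr_t offset) {
      return uintptr_t{1} << ((offset / kAlignment) % kBitsPerIntPtrT);
    }

    // Offset 0x1234 (4660): 4660 / 8 = 582 bits from the start, which
    // lands in word 582 / 64 = 9 at bit 582 % 64 = 6 (64-bit build).
    static_assert(OffsetToIndex(0x1234) == 9, "");
    static_assert(OffsetToMask(0x1234) == uintptr_t{1} << 6, "");
    static_assert(IndexToOffset(9) == 9 * 512, "");
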
space_bitmap_test.cc
113 template <size_t kAlignment>
127 size_t offset = RoundDown(r.next() % heap_capacity, kAlignment); in RunTest()
141 size_t offset = RoundDown(r.next() % heap_capacity, kAlignment); in RunTest()
143 size_t end = offset + RoundDown(r.next() % (remain + 1), kAlignment); in RunTest()
149 for (uintptr_t k = offset; k < end; k += kAlignment) { in RunTest()
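
A sketch of the test's offset generation: random heap positions rounded down to kAlignment so they name valid object slots. std::mt19937 stands in for the test's own RNG:

    #include <cstddef>
    #include <cstdint>
    #include <random>

    constexpr size_t kAlignment = 8;

    constexpr size_t RoundDown(size_t x, size_t n) {  // n is a power of two
      return x & ~(n - 1);
    }

    size_t RandomAlignedOffset(std::mt19937& rng, size_t heap_capacity) {
      return RoundDown(rng() % heap_capacity, kAlignment);
    }
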
bitmap.h
128 template<size_t kAlignment>
148 const uintptr_t addr = CoverBegin() + bit_index * kAlignment; in AddrFromBitIndex()
156 return (addr - CoverBegin()) / kAlignment; in BitIndexFromAddr()
182 : Bitmap(mem_map, num_bits), cover_begin_(begin), cover_end_(begin + kAlignment * num_bits) { in MemoryRangeBitmap()
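
A sketch of the two inverse mappings quoted above; cover_begin is the start of the covered range, and the values are illustrative:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    template <size_t kAlignment>
    struct ToyRangeBitmap {              // hypothetical stand-in
      uintptr_t cover_begin;

      uintptr_t AddrFromBitIndex(size_t bit_index) const {
        return cover_begin + bit_index * kAlignment;
      }
      size_t BitIndexFromAddr(uintptr_t addr) const {
        return (addr - cover_begin) / kAlignment;
      }
    };

    int main() {
      ToyRangeBitmap<4096> bm{0x70000000};
      // The two conversions are exact inverses for in-range indices.
      assert(bm.BitIndexFromAddr(bm.AddrFromBitIndex(42)) == 42);
    }
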
card_table.h
43 template<size_t kAlignment> class SpaceBitmap;
/art/runtime/gc/space/
bump_pointer_space-inl.h
30 num_bytes = RoundUp(num_bytes, kAlignment); in Alloc()
47 num_bytes = RoundUp(num_bytes, kAlignment); in AllocThreadUnsafe()
66 DCHECK_ALIGNED(num_bytes, kAlignment); in AllocNonvirtualWithoutAccounting()
93 *usable_size = RoundUp(num_bytes, kAlignment); in AllocationSizeNonvirtual()
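
A minimal bump-pointer allocation sketch showing why every size is rounded to kAlignment first: the bump pointer, and therefore every returned object, stays 8-byte aligned. The plain fetch_add here is a simplification of the real lock-free path, which retries with CAS rather than overshooting:

    #include <atomic>
    #include <cstddef>
    #include <cstdint>

    constexpr size_t kAlignment = 8;

    constexpr size_t RoundUp(size_t x, size_t n) { return (x + n - 1) & ~(n - 1); }

    struct ToyBumpSpace {                // hypothetical stand-in
      std::atomic<uintptr_t> end_;
      uintptr_t limit_;

      void* Alloc(size_t num_bytes) {
        num_bytes = RoundUp(num_bytes, kAlignment);  // keep end_ aligned
        uintptr_t old_end = end_.fetch_add(num_bytes, std::memory_order_relaxed);
        if (old_end + num_bytes > limit_) return nullptr;  // space exhausted
        return reinterpret_cast<void*>(old_end);
      }
    };
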
large_object_space.cc
266 return AlignSize() * FreeListSpace::kAlignment; in ByteSize()
271 DCHECK_ALIGNED(size, FreeListSpace::kAlignment); in SetByteSize()
272 alloc_size_ = (size / FreeListSpace::kAlignment) | (free ? kFlagFree : 0u); in SetByteSize()
311 return GetPrevFree() * FreeListSpace::kAlignment; in GetPrevFreeBytes()
315 DCHECK_ALIGNED(bytes, FreeListSpace::kAlignment); in SetPrevFreeBytes()
316 prev_free_ = bytes / FreeListSpace::kAlignment; in SetPrevFreeBytes()
355 CHECK_EQ(size % kAlignment, 0U); in Create()
369 CHECK_ALIGNED(space_capacity, kAlignment); in FreeListSpace()
370 const size_t alloc_info_size = sizeof(AllocationInfo) * (space_capacity / kAlignment); in FreeListSpace()
413 DCHECK_ALIGNED(obj, kAlignment); in Free()
[all …]
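
A sketch of the AllocationInfo encoding in SetByteSize()/ByteSize() above: because every size is a multiple of kAlignment, sizes are stored in kAlignment units, freeing high bits for flags. The exact value of kFlagFree is an assumption for this sketch:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    constexpr size_t kAlignment = 4096;  // FreeListSpace::kAlignment == kPageSize
    constexpr uint32_t kFlagFree = 0x80000000u;  // assumed flag placement

    struct ToyAllocationInfo {           // hypothetical stand-in
      uint32_t alloc_size_;              // size in kAlignment units, plus flags

      void SetByteSize(size_t size, bool free) {
        assert(size % kAlignment == 0);  // mirrors the DCHECK_ALIGNED above
        alloc_size_ =
            static_cast<uint32_t>(size / kAlignment) | (free ? kFlagFree : 0u);
      }
      size_t ByteSize() const { return (alloc_size_ & ~kFlagFree) * kAlignment; }
      bool IsFree() const { return (alloc_size_ & kFlagFree) != 0; }
    };
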
region_space-inl.h
29 num_bytes = RoundUp(num_bytes, kAlignment); in Alloc()
46 DCHECK_ALIGNED(num_bytes, kAlignment); in AllocNonvirtual()
118 DCHECK_ALIGNED(num_bytes, kAlignment); in Alloc()
146 *usable_size = RoundUp(num_bytes, kAlignment); in AllocationSizeNonvirtual()
261 return reinterpret_cast<mirror::Object*>(RoundUp(position, kAlignment)); in GetNextObject()
268 DCHECK_ALIGNED(num_bytes, kAlignment); in AllocLarge()
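
A sketch of the GetNextObject() pattern above: the next object starts at the end of the previous one, rounded up to kAlignment. The object type and SizeOf() are stand-ins for mirror::Object:

    #include <cstddef>
    #include <cstdint>

    constexpr size_t kAlignment = 8;     // RegionSpace::kAlignment == kObjectAlignment

    constexpr uintptr_t RoundUp(uintptr_t x, uintptr_t n) {
      return (x + n - 1) & ~(n - 1);
    }

    struct ToyObject {                   // hypothetical stand-in
      size_t size;
      size_t SizeOf() const { return size; }
    };

    ToyObject* GetNextObject(ToyObject* obj) {
      const uintptr_t position = reinterpret_cast<uintptr_t>(obj) + obj->SizeOf();
      return reinterpret_cast<ToyObject*>(RoundUp(position, kAlignment));
    }
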
large_object_space.h
162 static constexpr size_t kAlignment = kPageSize;
179 return (address - reinterpret_cast<uintptr_t>(Begin())) / kAlignment; in GetSlotIndexForAddress()
185 return reinterpret_cast<uintptr_t>(Begin()) + slot * kAlignment; in GetAllocationAddressForSlot()
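
A sketch of the slot addressing above: with kAlignment equal to kPageSize, allocation slot i lives exactly i pages past Begin(), so address and slot convert by a single division or multiplication. 4 KiB pages are assumed for the example value:

    #include <cstddef>
    #include <cstdint>

    constexpr size_t kAlignment = 4096;  // stand-in for kPageSize

    size_t GetSlotIndexForAddress(uintptr_t begin, uintptr_t address) {
      return (address - begin) / kAlignment;
    }
    uintptr_t GetAllocationAddressForSlot(uintptr_t begin, size_t slot) {
      return begin + slot * kAlignment;
    }
    // GetSlotIndexForAddress(0x10000000, 0x10003000) == 3 (the fourth page)
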
bump_pointer_space.h
160 static constexpr size_t kAlignment = 8; variable
191 static_assert(sizeof(BlockHeader) % kAlignment == 0,
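
A sketch of the compile-time check above: if sizeof(BlockHeader) were not a multiple of kAlignment, the payload following each header would start misaligned. The two fields are illustrative, not necessarily ART's actual layout:

    #include <cstddef>

    constexpr size_t kAlignment = 8;

    struct ToyBlockHeader {              // hypothetical stand-in
      size_t size_;                      // bytes used in the block
      size_t unused_;                    // padding keeps the size a multiple of 8
    };
    static_assert(sizeof(ToyBlockHeader) % kAlignment == 0,
                  "block header must preserve kAlignment for its payload");
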
bump_pointer_space.cc
93 return reinterpret_cast<mirror::Object*>(RoundUp(position, kAlignment)); in GetNextObject()
141 bytes = RoundUp(bytes, kAlignment); in AllocBlock()
region_space.cc
79 DCHECK(full_region_.Alloc(kAlignment, &ignored, nullptr, &ignored) == nullptr); in RegionSpace()
region_space.h
169 static constexpr size_t kAlignment = kObjectAlignment; variable
/art/runtime/gc/
heap-inl.h
74 byte_count = RoundUp(byte_count, space::BumpPointerSpace::kAlignment); in AllocObjectWithAllocator()
252 alloc_size = RoundUp(alloc_size, space::BumpPointerSpace::kAlignment); in TryToAllocate()
314 DCHECK_ALIGNED(alloc_size, space::BumpPointerSpace::kAlignment); in TryToAllocate()
338 alloc_size = RoundUp(alloc_size, space::RegionSpace::kAlignment); in TryToAllocate()
345 DCHECK_ALIGNED(alloc_size, space::RegionSpace::kAlignment); in TryToAllocate()
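
All of these call sites lean on RoundUp with a power-of-two alignment. Assuming the standard formulation, adding (n - 1) and masking clears the low bits, which is why kAlignment must be a power of two:

    #include <cstddef>

    constexpr size_t RoundUp(size_t x, size_t n) { return (x + n - 1) & ~(n - 1); }

    static_assert(RoundUp(13, 8) == 16, "13 + 7 = 20 = 0b10100; & ~7 -> 16");
    static_assert(RoundUp(16, 8) == 16, "already-aligned values are unchanged");
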
/art/runtime/entrypoints/quick/
quick_alloc_entrypoints.cc
39 byte_count = RoundUp(byte_count, gc::space::BumpPointerSpace::kAlignment); \
65 byte_count = RoundUp(byte_count, gc::space::BumpPointerSpace::kAlignment); \
90 byte_count = RoundUp(byte_count, gc::space::BumpPointerSpace::kAlignment); \
/art/runtime/base/
arena_allocator.h
307 bytes = RoundUp(bytes, kAlignment);
369 static constexpr size_t kAlignment = 8; variable
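
A sketch of the arena fast path: a single-threaded bump allocator whose 8-byte kAlignment keeps every returned pointer suitably aligned for any of the compiler's allocations. Field names here are illustrative:

    #include <cstddef>
    #include <cstdint>

    constexpr size_t kAlignment = 8;

    constexpr size_t RoundUp(size_t x, size_t n) { return (x + n - 1) & ~(n - 1); }

    struct ToyArena {                    // hypothetical stand-in
      uint8_t* ptr_;
      uint8_t* end_;

      void* Alloc(size_t bytes) {
        bytes = RoundUp(bytes, kAlignment);
        if (static_cast<size_t>(end_ - ptr_) < bytes) {
          return nullptr;                // caller would grab a new arena here
        }
        uint8_t* ret = ptr_;
        ptr_ += bytes;
        return ret;
      }
    };
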
/art/runtime/gc/collector/
concurrent_copying.cc
1392 size_t alloc_size = RoundUp(obj_size, space::RegionSpace::kAlignment); in operator ()()
1738 CHECK_ALIGNED(alloc_size, space::RegionSpace::kAlignment); in AllocateInSkippedBlock()
1740 size_t min_object_size = RoundUp(sizeof(mirror::Object), space::RegionSpace::kAlignment); in AllocateInSkippedBlock()
1757 CHECK_ALIGNED(it->first - alloc_size, space::RegionSpace::kAlignment); in AllocateInSkippedBlock()
1768 CHECK_ALIGNED(byte_size, space::RegionSpace::kAlignment); in AllocateInSkippedBlock()
1776 CHECK_ALIGNED(byte_size - alloc_size, space::RegionSpace::kAlignment); in AllocateInSkippedBlock()
1792 size_t region_space_alloc_size = RoundUp(obj_size, space::RegionSpace::kAlignment); in Copy()
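
A sketch of the AllocateInSkippedBlock() bookkeeping implied by the CHECKs above: skipped blocks are kept keyed by size, a block at least alloc_size bytes long is taken, and any tail (also a multiple of kAlignment) is returned to the map. The std::multimap container is an assumption for this sketch:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <map>

    constexpr size_t kAlignment = 8;

    void* AllocateInSkippedBlock(std::multimap<size_t, uint8_t*>& blocks,
                                 size_t alloc_size) {
      assert(alloc_size % kAlignment == 0);       // mirrors CHECK_ALIGNED
      auto it = blocks.lower_bound(alloc_size);   // smallest block that fits
      if (it == blocks.end()) return nullptr;
      const size_t byte_size = it->first;
      uint8_t* addr = it->second;
      blocks.erase(it);
      if (byte_size > alloc_size) {               // return the tail for reuse
        assert((byte_size - alloc_size) % kAlignment == 0);
        blocks.emplace(byte_size - alloc_size, addr + alloc_size);
      }
      return addr;
    }
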
mark_compact.cc
74 const size_t alloc_size = RoundUp(obj->SizeOf(), space::BumpPointerSpace::kAlignment); in ForwardObject()
94 DCHECK_ALIGNED(obj, space::BumpPointerSpace::kAlignment); in operator ()()
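
A sketch of the ForwardObject() step above: compaction assigns each live object the current bump pointer, then advances it by the object's size rounded to kAlignment, so compacted objects stay aligned and densely packed. The struct is an illustrative stand-in:

    #include <cstddef>
    #include <cstdint>

    constexpr size_t kAlignment = 8;     // BumpPointerSpace::kAlignment

    constexpr size_t RoundUp(size_t x, size_t n) { return (x + n - 1) & ~(n - 1); }

    struct ToyForwarder {                // hypothetical stand-in
      uintptr_t bump_pointer_;

      uintptr_t ForwardObject(size_t obj_size) {
        const size_t alloc_size = RoundUp(obj_size, kAlignment);
        const uintptr_t new_addr = bump_pointer_;
        bump_pointer_ += alloc_size;
        return new_addr;                 // the object's post-compaction address
      }
    };
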