Home
last modified time | relevance | path

Searched refs: kAlignment (Results 1 – 25 of 33) sorted by relevance

12

/art/libartbase/base/
Darena_allocator_test.cc141 for (size_t size = 1; size <= ArenaAllocator::kAlignment + 1; ++size) { in TEST_F()
143 EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(allocation)) in TEST_F()
158 const size_t original_size = ArenaAllocator::kAlignment * 2; in TEST_F()
161 const size_t new_size = ArenaAllocator::kAlignment * 3; in TEST_F()
171 const size_t original_size = ArenaAllocator::kAlignment * 2; in TEST_F()
174 const size_t new_size = ArenaAllocator::kAlignment * 2 + (ArenaAllocator::kAlignment / 2); in TEST_F()
184 const size_t original_size = ArenaAllocator::kAlignment * 2 + (ArenaAllocator::kAlignment / 2); in TEST_F()
187 const size_t new_size = ArenaAllocator::kAlignment * 4; in TEST_F()
197 const size_t original_size = ArenaAllocator::kAlignment * 2 + (ArenaAllocator::kAlignment / 2); in TEST_F()
200 const size_t new_size = ArenaAllocator::kAlignment * 3; in TEST_F()
[all …]
Dscoped_arena_allocator.h71 static constexpr size_t kAlignment = 8u; variable
100 size_t rounded_bytes = RoundUp(bytes + (kIsDebugBuild ? kAlignment : 0u), kAlignment); in Alloc()
108 ptr += kAlignment; in Alloc()
Darena_allocator.h271 bytes = RoundUp(bytes, kAlignment);
277 DCHECK_ALIGNED(ret, kAlignment);
311 const size_t aligned_ptr_size = RoundUp(ptr_size, kAlignment);
317 const size_t aligned_new_size = RoundUp(new_size, kAlignment);
324 DCHECK_ALIGNED(ptr_, kAlignment);
359 static constexpr size_t kAlignment = 8u; variable
Darena_allocator.cc288 DCHECK_ALIGNED(begin_, kAlignment); in AllocFromNewArena()
/art/runtime/gc/accounting/
Dspace_bitmap.cc34 template<size_t kAlignment>
35 size_t SpaceBitmap<kAlignment>::ComputeBitmapSize(uint64_t capacity) { in ComputeBitmapSize()
39 const uint64_t kBytesCoveredPerWord = kAlignment * kBitsPerIntPtrT; in ComputeBitmapSize()
45 template<size_t kAlignment>
46 size_t SpaceBitmap<kAlignment>::ComputeHeapSize(uint64_t bitmap_bytes) { in ComputeHeapSize()
47 return bitmap_bytes * kBitsPerByte * kAlignment; in ComputeHeapSize()
50 template<size_t kAlignment>
51 SpaceBitmap<kAlignment>* SpaceBitmap<kAlignment>::CreateFromMemMap( in CreateFromMemMap()
60 template<size_t kAlignment>
61 SpaceBitmap<kAlignment>::SpaceBitmap(const std::string& name, in SpaceBitmap()
[all …]
Dspace_bitmap-inl.h33 template<size_t kAlignment>
34 inline bool SpaceBitmap<kAlignment>::AtomicTestAndSet(const mirror::Object* obj) { in AtomicTestAndSet()
55 template<size_t kAlignment>
56 inline bool SpaceBitmap<kAlignment>::Test(const mirror::Object* obj) const { in Test()
66 template<size_t kAlignment>
68 inline void SpaceBitmap<kAlignment>::VisitMarkedRange(uintptr_t visit_begin, in VisitMarkedRange()
73 for (uintptr_t i = visit_begin; i < visit_end; i += kAlignment) { in VisitMarkedRange()
89 const size_t bit_start = (offset_start / kAlignment) % kBitsPerIntPtrT; in VisitMarkedRange()
90 const size_t bit_end = (offset_end / kAlignment) % kBitsPerIntPtrT; in VisitMarkedRange()
115 mirror::Object* obj = reinterpret_cast<mirror::Object*>(ptr_base + shift * kAlignment); in VisitMarkedRange()
[all …]
Dbitmap.cc82 template<size_t kAlignment>
83 MemoryRangeBitmap<kAlignment>* MemoryRangeBitmap<kAlignment>::Create( in Create()
85 CHECK_ALIGNED(cover_begin, kAlignment); in Create()
86 CHECK_ALIGNED(cover_end, kAlignment); in Create()
87 const size_t num_bits = (cover_end - cover_begin) / kAlignment; in Create()
93 template<size_t kAlignment>
94 MemoryRangeBitmap<kAlignment>* MemoryRangeBitmap<kAlignment>::CreateFromMemMap( in CreateFromMemMap()
Dspace_bitmap.h40 template<size_t kAlignment>
68 return offset / kAlignment / kBitsPerIntPtrT; in OffsetToIndex()
75 return static_cast<T>(index * kAlignment * kBitsPerIntPtrT); in IndexToOffset()
82 return (offset / kAlignment) % kBitsPerIntPtrT; in OffsetBitIndex()
142 for (; visit_begin < visit_end; visit_begin += kAlignment) { in VisitRange()
260 template<size_t kAlignment>
261 std::ostream& operator << (std::ostream& stream, const SpaceBitmap<kAlignment>& bitmap);
Dspace_bitmap_test.cc156 template <size_t kAlignment, typename TestFn>
169 size_t offset = RoundDown(r.next() % heap_capacity, kAlignment); in RunTest()
180 const size_t offset = RoundDown(r.next() % heap_capacity, kAlignment); in RunTest()
182 const size_t end = offset + RoundDown(r.next() % (remain + 1), kAlignment); in RunTest()
185 for (uintptr_t k = offset; k < end; k += kAlignment) { in RunTest()
199 template <size_t kAlignment>
212 RunTest<kAlignment>(count_test_fn); in RunTestCount()
223 template <size_t kAlignment>
249 RunTest<kAlignment>(order_test_fn); in RunTestOrder()
Dbitmap.h126 template<size_t kAlignment>
146 const uintptr_t addr = CoverBegin() + bit_index * kAlignment; in AddrFromBitIndex()
154 return (addr - CoverBegin()) / kAlignment; in BitIndexFromAddr()
182 cover_end_(begin + kAlignment * num_bits) {} in MemoryRangeBitmap()
Dcard_table.h42 template<size_t kAlignment> class SpaceBitmap;
/art/runtime/gc/space/
Dbump_pointer_space-inl.h31 num_bytes = RoundUp(num_bytes, kAlignment); in Alloc()
48 num_bytes = RoundUp(num_bytes, kAlignment); in AllocThreadUnsafe()
69 DCHECK_ALIGNED(num_bytes, kAlignment); in AllocNonvirtualWithoutAccounting()
Dlarge_object_space.cc267 return AlignSize() * FreeListSpace::kAlignment; in ByteSize()
272 DCHECK_ALIGNED(size, FreeListSpace::kAlignment); in SetByteSize()
273 alloc_size_ = (size / FreeListSpace::kAlignment) | (free ? kFlagFree : 0u); in SetByteSize()
312 return GetPrevFree() * FreeListSpace::kAlignment; in GetPrevFreeBytes()
316 DCHECK_ALIGNED(bytes, FreeListSpace::kAlignment); in SetPrevFreeBytes()
317 prev_free_ = bytes / FreeListSpace::kAlignment; in SetPrevFreeBytes()
356 CHECK_EQ(size % kAlignment, 0U); in Create()
375 CHECK_ALIGNED(space_capacity, kAlignment); in FreeListSpace()
376 const size_t alloc_info_size = sizeof(AllocationInfo) * (space_capacity / kAlignment); in FreeListSpace()
426 DCHECK_ALIGNED(obj, kAlignment); in Free()
[all …]
Dbump_pointer_space.cc102 return reinterpret_cast<mirror::Object*>(RoundUp(position, kAlignment)); in GetNextObject()
150 bytes = RoundUp(bytes, kAlignment); in AllocBlock()
234 *usable_size = RoundUp(num_bytes, kAlignment); in AllocationSizeNonvirtual()
Dlarge_object_space.h185 static constexpr size_t kAlignment = kPageSize;
204 return (address - reinterpret_cast<uintptr_t>(Begin())) / kAlignment; in GetSlotIndexForAddress()
210 return reinterpret_cast<uintptr_t>(Begin()) + slot * kAlignment; in GetAllocationAddressForSlot()
Dregion_space-inl.h36 num_bytes = RoundUp(num_bytes, kAlignment); in Alloc()
55 DCHECK_ALIGNED(num_bytes, kAlignment); in AllocNonvirtual()
104 DCHECK_ALIGNED(num_bytes, kAlignment); in Alloc()
309 return reinterpret_cast<mirror::Object*>(RoundUp(position, kAlignment)); in GetNextObject()
317 DCHECK_ALIGNED(num_bytes, kAlignment); in AllocLarge()
Dbump_pointer_space.h168 static constexpr size_t kAlignment = 8; variable
199 static_assert(sizeof(BlockHeader) % kAlignment == 0,
Dregion_space.cc144 DCHECK(full_region_.Alloc(kAlignment, &ignored, nullptr, &ignored) == nullptr); in RegionSpace()
640 DCHECK_ALIGNED(obj, kAlignment); in CheckLiveBytesAgainstRegionBitmap()
653 size_t alloc_size = RoundUp(obj_size, space::RegionSpace::kAlignment); in CheckLiveBytesAgainstRegionBitmap()
669 IsPowerOfTwo(RegionSpace::kAlignment) && in PoisonUnevacuatedRange()
670 (kPoisonDeadObjectSize < RegionSpace::kAlignment), in PoisonUnevacuatedRange()
694 DCHECK_ALIGNED(obj, kAlignment); in PoisonDeadObjectsInUnevacuatedRegion()
943 prev_object_end = RoundUp(object_end, kAlignment); in GetLongestConsecutiveFreeBytes()
956 *usable_size = RoundUp(num_bytes, kAlignment); in AllocationSizeNonvirtual()
/art/runtime/gc/
Dheap-inl.h83 byte_count = RoundUp(byte_count, space::BumpPointerSpace::kAlignment); in AllocObjectWithAllocator()
267 alloc_size = RoundUp(alloc_size, space::BumpPointerSpace::kAlignment); in TryToAllocate()
347 alloc_size = RoundUp(alloc_size, space::RegionSpace::kAlignment); in TryToAllocate()
358 static_assert(space::RegionSpace::kAlignment == space::BumpPointerSpace::kAlignment, in TryToAllocate()
360 static_assert(kObjectAlignment == space::BumpPointerSpace::kAlignment, in TryToAllocate()
/art/libdexfile/dex/
Dcompact_offset_table.h64 static constexpr size_t kAlignment = sizeof(uint32_t); variable
Ddex_file_structs.h197 static constexpr size_t kAlignment = sizeof(uint32_t); member
Dcompact_dex_file.h88 static constexpr size_t kAlignment = sizeof(uint16_t); member
/art/runtime/jit/
Djit_code_cache.h50 template<size_t kAlignment> class MemoryRangeBitmap;
62 template<size_t kAlignment> class MemoryRangeBitmap;
/art/dexlayout/
Dcompact_dex_writer.cc83 stream->AlignTo(CompactOffsetTable::kAlignment); in WriteDebugInfoOffsetTable()
147 CompactDexFile::CodeItem::kAlignment, in WriteCodeItem()
/art/compiler/optimizing/
Doptimizing_unit_test.h157 CHECK_ALIGNED(aligned_data, StandardDexFile::CodeItem::kAlignment);

12