Home
last modified time | relevance | path

Searched refs: alloc_size (Results 1 – 11 of 11) sorted by relevance

/art/runtime/gc/
Dheap-inl.h238 size_t alloc_size, in TryToAllocate() argument
245 UNLIKELY(IsOutOfMemoryOnAllocation<kGrow>(allocator_type, alloc_size))) { in TryToAllocate()
252 alloc_size = RoundUp(alloc_size, space::BumpPointerSpace::kAlignment); in TryToAllocate()
253 ret = bump_pointer_space_->AllocNonvirtual(alloc_size); in TryToAllocate()
255 *bytes_allocated = alloc_size; in TryToAllocate()
256 *usable_size = alloc_size; in TryToAllocate()
257 *bytes_tl_bulk_allocated = alloc_size; in TryToAllocate()
264 size_t max_bytes_tl_bulk_allocated = rosalloc_space_->MaxBytesBulkAllocatedFor(alloc_size); in TryToAllocate()
269 ret = rosalloc_space_->Alloc(self, alloc_size, bytes_allocated, usable_size, in TryToAllocate()
274 rosalloc_space_->MaxBytesBulkAllocatedForNonvirtual(alloc_size); in TryToAllocate()
[all …]
Dheap.cc1687 size_t alloc_size, in AllocateInternalWithGc() argument
1710 mirror::Object* ptr = TryToAllocate<true, false>(self, allocator, alloc_size, bytes_allocated, in AllocateInternalWithGc()
1725 mirror::Object* ptr = TryToAllocate<true, false>(self, allocator, alloc_size, bytes_allocated, in AllocateInternalWithGc()
1746 mirror::Object* ptr = TryToAllocate<true, false>(self, allocator, alloc_size, bytes_allocated, in AllocateInternalWithGc()
1755 mirror::Object* ptr = TryToAllocate<true, true>(self, allocator, alloc_size, bytes_allocated, in AllocateInternalWithGc()
1764 VLOG(gc) << "Forcing collection of SoftReferences for " << PrettySize(alloc_size) in AllocateInternalWithGc()
1774 ptr = TryToAllocate<true, true>(self, allocator, alloc_size, bytes_allocated, usable_size, in AllocateInternalWithGc()
1795 ptr = TryToAllocate<true, true>(self, allocator, alloc_size, bytes_allocated, in AllocateInternalWithGc()
1828 if (!IsOutOfMemoryOnAllocation<false>(allocator, alloc_size)) { in AllocateInternalWithGc()
1845 ptr = TryToAllocate<true, true>(self, allocator, alloc_size, bytes_allocated, in AllocateInternalWithGc()
[all …]
Dheap.h882 size_t alloc_size,
892 ALWAYS_INLINE bool IsOutOfMemoryOnAllocation(AllocatorType allocator_type, size_t alloc_size);
/art/runtime/gc/space/
Dspace_test.h182 size_t alloc_size; in SizeFootPrintGrowthLimitAndTrimBody() local
184 alloc_size = object_size; in SizeFootPrintGrowthLimitAndTrimBody()
186 alloc_size = test_rand(&rand_seed) % static_cast<size_t>(-object_size); in SizeFootPrintGrowthLimitAndTrimBody()
189 if (alloc_size < size_of_zero_length_byte_array) { in SizeFootPrintGrowthLimitAndTrimBody()
190 alloc_size = size_of_zero_length_byte_array; in SizeFootPrintGrowthLimitAndTrimBody()
198 object.Assign(Alloc(space, self, alloc_size, &bytes_allocated, nullptr, in SizeFootPrintGrowthLimitAndTrimBody()
201 object.Assign(AllocWithGrowth(space, self, alloc_size, &bytes_allocated, nullptr, in SizeFootPrintGrowthLimitAndTrimBody()
Dlarge_object_space.cc213 size_t alloc_size = it->second.mem_map->BaseSize(); in AllocationSize() local
215 *usable_size = alloc_size; in AllocationSize()
217 return alloc_size; in AllocationSize()
390 size_t alloc_size = cur_info->ByteSize(); in Walk() local
392 uint8_t* byte_end = byte_start + alloc_size; in Walk()
393 callback(byte_start, byte_end, alloc_size, arg); in Walk()
472 size_t alloc_size = info->ByteSize(); in AllocationSize() local
474 *usable_size = alloc_size; in AllocationSize()
476 return alloc_size; in AllocationSize()
Dlarge_object_space_test.cc123 size_t alloc_size, bytes_tl_bulk_allocated; in Run() local
124 mirror::Object* ptr = los_->Alloc(self, size_, &alloc_size, nullptr, in Run()
Dregion_space.h213 void AddLiveBytes(mirror::Object* ref, size_t alloc_size) { in AddLiveBytes() argument
215 reg->AddLiveBytes(alloc_size); in AddLiveBytes()
/art/runtime/gc/collector/
Dconcurrent_copying.cc1392 size_t alloc_size = RoundUp(obj_size, space::RegionSpace::kAlignment); in operator ()() local
1393 collector_->region_space_->AddLiveBytes(ref, alloc_size); in operator ()()
1736 mirror::Object* ConcurrentCopying::AllocateInSkippedBlock(size_t alloc_size) { in AllocateInSkippedBlock() argument
1738 CHECK_ALIGNED(alloc_size, space::RegionSpace::kAlignment); in AllocateInSkippedBlock()
1742 auto it = skipped_blocks_map_.lower_bound(alloc_size); in AllocateInSkippedBlock()
1749 CHECK_GE(byte_size, alloc_size); in AllocateInSkippedBlock()
1750 if (byte_size > alloc_size && byte_size - alloc_size < min_object_size) { in AllocateInSkippedBlock()
1752 it = skipped_blocks_map_.lower_bound(alloc_size + min_object_size); in AllocateInSkippedBlock()
1757 CHECK_ALIGNED(it->first - alloc_size, space::RegionSpace::kAlignment); in AllocateInSkippedBlock()
1758 CHECK_GE(it->first - alloc_size, min_object_size) in AllocateInSkippedBlock()
[all …]
Dmark_compact.cc74 const size_t alloc_size = RoundUp(obj->SizeOf(), space::BumpPointerSpace::kAlignment); in ForwardObject() local
84 bump_pointer_ += alloc_size; in ForwardObject()
Dconcurrent_copying.h167 mirror::Object* AllocateInSkippedBlock(size_t alloc_size)
/art/runtime/mirror/
Dstring-inl.h167 size_t alloc_size = RoundUp(size, kObjectAlignment); in Alloc() local
186 heap->AllocObjectWithAllocator<kIsInstrumented, true>(self, string_class, alloc_size, in Alloc()