/art/runtime/gc/ |
D | heap-inl.h |
  223   size_t alloc_size, size_t* bytes_allocated,  in TryToAllocate() argument
  228   UNLIKELY(IsOutOfMemoryOnAllocation<kGrow>(allocator_type, alloc_size))) {  in TryToAllocate()
  235   alloc_size = RoundUp(alloc_size, space::BumpPointerSpace::kAlignment);  in TryToAllocate()
  236   ret = bump_pointer_space_->AllocNonvirtual(alloc_size);  in TryToAllocate()
  238   *bytes_allocated = alloc_size;  in TryToAllocate()
  239   *usable_size = alloc_size;  in TryToAllocate()
  240   *bytes_tl_bulk_allocated = alloc_size;  in TryToAllocate()
  247   size_t max_bytes_tl_bulk_allocated = rosalloc_space_->MaxBytesBulkAllocatedFor(alloc_size);  in TryToAllocate()
  252   ret = rosalloc_space_->Alloc(self, alloc_size, bytes_allocated, usable_size,  in TryToAllocate()
  257   rosalloc_space_->MaxBytesBulkAllocatedForNonvirtual(alloc_size);  in TryToAllocate()
  [all …]
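
The heap-inl.h hits above are the allocation fast path, Heap::TryToAllocate: the request is rounded up to the space's alignment and then served either by bumping a pointer or by RosAlloc. A minimal sketch of the bump-pointer variant, assuming a hypothetical BumpPointerArena class (not ART's BumpPointerSpace API):

    #include <cstddef>
    #include <cstdint>

    // Hypothetical BumpPointerArena, not ART's BumpPointerSpace: demonstrates
    // the round-up-then-bump pattern from heap-inl.h lines 235-240.
    class BumpPointerArena {
     public:
      static constexpr size_t kAlignment = 8;  // ART's object alignment is also 8 bytes

      BumpPointerArena(uint8_t* begin, size_t capacity)
          : pos_(begin), end_(begin + capacity) {}

      static size_t RoundUp(size_t x, size_t n) {  // n must be a power of two
        return (x + n - 1) & ~(n - 1);
      }

      // Same out-parameter shape as TryToAllocate: a bump pointer never
      // over-allocates, so all three reported sizes equal the rounded request.
      void* Alloc(size_t alloc_size, size_t* bytes_allocated,
                  size_t* usable_size, size_t* bytes_tl_bulk_allocated) {
        alloc_size = RoundUp(alloc_size, kAlignment);
        if (alloc_size > static_cast<size_t>(end_ - pos_)) {
          return nullptr;  // fast path fails; caller enters the GC slow path
        }
        void* ret = pos_;
        pos_ += alloc_size;
        *bytes_allocated = alloc_size;
        *usable_size = alloc_size;
        *bytes_tl_bulk_allocated = alloc_size;
        return ret;
      }

     private:
      uint8_t* pos_;
      uint8_t* end_;
    };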
|
D | heap.cc |
  1497  size_t alloc_size, size_t* bytes_allocated,  in AllocateInternalWithGc() argument
  1518  mirror::Object* ptr = TryToAllocate<true, false>(self, allocator, alloc_size, bytes_allocated,  in AllocateInternalWithGc()
  1532  mirror::Object* ptr = TryToAllocate<true, false>(self, allocator, alloc_size, bytes_allocated,  in AllocateInternalWithGc()
  1552  mirror::Object* ptr = TryToAllocate<true, false>(self, allocator, alloc_size, bytes_allocated,  in AllocateInternalWithGc()
  1561  mirror::Object* ptr = TryToAllocate<true, true>(self, allocator, alloc_size, bytes_allocated,  in AllocateInternalWithGc()
  1570  VLOG(gc) << "Forcing collection of SoftReferences for " << PrettySize(alloc_size)  in AllocateInternalWithGc()
  1579  ptr = TryToAllocate<true, true>(self, allocator, alloc_size, bytes_allocated, usable_size,  in AllocateInternalWithGc()
  1595  ptr = TryToAllocate<true, true>(self, allocator, alloc_size, bytes_allocated,  in AllocateInternalWithGc()
  1628  if (!IsOutOfMemoryOnAllocation<false>(allocator, alloc_size)) {  in AllocateInternalWithGc()
  1640  ptr = TryToAllocate<true, true>(self, allocator, alloc_size, bytes_allocated,  in AllocateInternalWithGc()
  [all …]
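
The heap.cc hits are the slow path, AllocateInternalWithGc: when the fast path fails, the heap retries the allocation after progressively heavier collections, clearing SoftReferences and allowing footprint growth only as a last resort before OutOfMemoryError (the <true, true> instantiations above). A hedged sketch of that retry ladder; GcLevel, run_gc, and try_alloc are illustrative names, not ART's API, and the real code also dispatches on collector type and homogeneous-space compaction:

    #include <cstddef>
    #include <functional>

    enum class GcLevel { kSticky, kPartial, kFull, kFullClearSoftRefs };

    void* AllocateWithGc(size_t alloc_size,
                         const std::function<void(GcLevel)>& run_gc,
                         const std::function<void*(size_t, bool)>& try_alloc) {
      static const GcLevel kPlan[] = {GcLevel::kSticky, GcLevel::kPartial,
                                      GcLevel::kFull, GcLevel::kFullClearSoftRefs};
      for (GcLevel level : kPlan) {
        run_gc(level);
        // Early rungs retry without growing the heap (TryToAllocate<true, false>
        // above); the heavier ones allow footprint growth (<true, true>).
        const bool grow = (level >= GcLevel::kFull);
        if (void* ptr = try_alloc(alloc_size, grow)) {
          return ptr;
        }
      }
      return nullptr;  // out of options: caller throws OutOfMemoryError
    }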
|
D | heap.h |
  767   size_t alloc_size, size_t* bytes_allocated,
  776   ALWAYS_INLINE bool IsOutOfMemoryOnAllocation(AllocatorType allocator_type, size_t alloc_size);
|
/art/runtime/gc/collector/ |
D | concurrent_copying.cc |
  975   size_t alloc_size = RoundUp(obj_size, space::RegionSpace::kAlignment);  in operator ()() local
  976   collector_->region_space_->AddLiveBytes(ref, alloc_size);  in operator ()()
  1234  mirror::Object* ConcurrentCopying::AllocateInSkippedBlock(size_t alloc_size) {  in AllocateInSkippedBlock() argument
  1236  CHECK(IsAligned<space::RegionSpace::kAlignment>(alloc_size));  in AllocateInSkippedBlock()
  1240  auto it = skipped_blocks_map_.lower_bound(alloc_size);  in AllocateInSkippedBlock()
  1247  CHECK_GE(byte_size, alloc_size);  in AllocateInSkippedBlock()
  1248  if (byte_size > alloc_size && byte_size - alloc_size < min_object_size) {  in AllocateInSkippedBlock()
  1250  it = skipped_blocks_map_.lower_bound(alloc_size + min_object_size);  in AllocateInSkippedBlock()
  1255  CHECK(IsAligned<space::RegionSpace::kAlignment>(it->first - alloc_size));  in AllocateInSkippedBlock()
  1256  CHECK_GE(it->first - alloc_size, min_object_size)  in AllocateInSkippedBlock()
  [all …]
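
The concurrent_copying.cc hits at 1234-1256 show the reuse of skipped blocks: they sit in a multimap keyed by byte size, lower_bound finds the smallest block that fits, and a block is split only if the remainder could still hold a minimum-size object (ART also formats the leftover bytes as a filler object before reinserting them, omitted here). A sketch of that best-fit lookup with illustrative kAlignment/kMinObjectSize values:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <map>

    constexpr size_t kAlignment = 8;       // stands in for space::RegionSpace::kAlignment
    constexpr size_t kMinObjectSize = 16;  // smallest remainder worth keeping after a split

    // size -> block address; stands in for ART's skipped_blocks_map_.
    std::multimap<size_t, uint8_t*> skipped_blocks;

    uint8_t* AllocInSkippedBlock(size_t alloc_size) {
      assert(alloc_size % kAlignment == 0);  // mirrors the CHECK at line 1236
      auto it = skipped_blocks.lower_bound(alloc_size);
      if (it != skipped_blocks.end() &&
          it->first > alloc_size && it->first - alloc_size < kMinObjectSize) {
        // The leftover would be too small to describe as an object, so look
        // for a block large enough that splitting leaves a usable remainder
        // (mirrors the re-search at line 1250).
        it = skipped_blocks.lower_bound(alloc_size + kMinObjectSize);
      }
      if (it == skipped_blocks.end()) {
        return nullptr;  // nothing reusable; caller falls back to a fresh region
      }
      uint8_t* addr = it->second;
      const size_t byte_size = it->first;
      skipped_blocks.erase(it);
      if (byte_size > alloc_size) {
        // Reinsert the tail so a later request can use it. (ART first formats
        // the tail as a filler object; omitted here.)
        skipped_blocks.emplace(byte_size - alloc_size, addr + alloc_size);
      }
      return addr;
    }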
|
D | mark_compact.cc |
  89    const size_t alloc_size = RoundUp(obj->SizeOf(), space::BumpPointerSpace::kAlignment);  in ForwardObject() local
  99    bump_pointer_ += alloc_size;  in ForwardObject()
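
The mark_compact.cc hits show how forwarding addresses are assigned: each live object's new location is the current bump pointer, which then advances by the object's aligned size. A sketch under illustrative types (ObjectInfo stands in for ART's objects, which store the forwarding address in the lock word):

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    constexpr size_t kAlignment = 8;

    inline size_t RoundUp(size_t x, size_t n) { return (x + n - 1) & ~(n - 1); }

    struct ObjectInfo {
      size_t size_of;        // unaligned object size (obj->SizeOf() in ART)
      uintptr_t forwarding;  // assigned new address
    };

    void ForwardObjects(std::vector<ObjectInfo>& live, uintptr_t space_begin) {
      uintptr_t bump_pointer = space_begin;
      for (ObjectInfo& obj : live) {
        const size_t alloc_size = RoundUp(obj.size_of, kAlignment);  // line 89
        obj.forwarding = bump_pointer;
        bump_pointer += alloc_size;  // matches mark_compact.cc line 99
      }
    }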
|
D | concurrent_copying.h |
  228   mirror::Object* AllocateInSkippedBlock(size_t alloc_size)
|
/art/runtime/gc/space/ |
D | large_object_space.cc |
  208   size_t alloc_size = it->second.mem_map->BaseSize();  in AllocationSize() local
  210   *usable_size = alloc_size;  in AllocationSize()
  212   return alloc_size;  in AllocationSize()
  385   size_t alloc_size = cur_info->ByteSize();  in Walk() local
  387   uint8_t* byte_end = byte_start + alloc_size;  in Walk()
  388   callback(byte_start, byte_end, alloc_size, arg);  in Walk()
  467   size_t alloc_size = info->ByteSize();  in AllocationSize() local
  469   *usable_size = alloc_size;  in AllocationSize()
  471   return alloc_size;  in AllocationSize()
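
The large_object_space.cc hits show that AllocationSize for large objects is a metadata lookup: each large object has its own mem map (or free-list info block) whose size is returned directly, and the usable size equals the allocation size (lines 210/469). A sketch using a plain map as a stand-in for ART's bookkeeping structures:

    #include <cstddef>
    #include <unordered_map>

    // Illustrative stand-in for ART's per-object mem_map / AllocationInfo
    // bookkeeping: object address -> allocated byte size.
    std::unordered_map<const void*, size_t> large_object_sizes;

    size_t AllocationSize(const void* obj, size_t* usable_size) {
      auto it = large_object_sizes.find(obj);
      if (it == large_object_sizes.end()) {
        return 0;  // not a large object we know about
      }
      const size_t alloc_size = it->second;
      if (usable_size != nullptr) {
        *usable_size = alloc_size;  // the whole mapping is usable
      }
      return alloc_size;
    }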
|
D | large_object_space_test.cc |
  123   size_t alloc_size, bytes_tl_bulk_allocated;  in Run() local
  124   mirror::Object* ptr = los_->Alloc(self, size_, &alloc_size, nullptr,  in Run()
|
D | space_test.h |
  441   size_t alloc_size;  in SizeFootPrintGrowthLimitAndTrimBody() local
  443   alloc_size = object_size;  in SizeFootPrintGrowthLimitAndTrimBody()
  445   alloc_size = test_rand(&rand_seed) % static_cast<size_t>(-object_size);  in SizeFootPrintGrowthLimitAndTrimBody()
  448   if (alloc_size < size_of_zero_length_byte_array) {  in SizeFootPrintGrowthLimitAndTrimBody()
  449   alloc_size = size_of_zero_length_byte_array;  in SizeFootPrintGrowthLimitAndTrimBody()
  457   object.Assign(Alloc(space, self, alloc_size, &bytes_allocated, nullptr,  in SizeFootPrintGrowthLimitAndTrimBody()
  460   object.Assign(AllocWithGrowth(space, self, alloc_size, &bytes_allocated, nullptr,  in SizeFootPrintGrowthLimitAndTrimBody()
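
The space_test.h hits pick each test allocation's size: a non-negative object_size is used as-is, a negative one requests a random size bounded by its magnitude, and anything smaller than a zero-length byte array is clamped up to that minimum. A sketch of that selection; the test_rand body here is an illustrative LCG, not ART's:

    #include <cstddef>
    #include <cstdint>

    inline size_t test_rand(size_t* seed) {
      *seed = *seed * 1103515245 + 12345;  // illustrative LCG
      return *seed;
    }

    size_t PickAllocSize(int64_t object_size, size_t* rand_seed,
                         size_t size_of_zero_length_byte_array) {
      size_t alloc_size;
      if (object_size >= 0) {
        alloc_size = static_cast<size_t>(object_size);  // fixed-size run
      } else {
        // A negative size means "random in [0, -object_size)" (line 445).
        alloc_size = test_rand(rand_seed) % static_cast<size_t>(-object_size);
      }
      if (alloc_size < size_of_zero_length_byte_array) {
        alloc_size = size_of_zero_length_byte_array;  // smallest allocatable object
      }
      return alloc_size;
    }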
|
D | region_space.h |
  211   void AddLiveBytes(mirror::Object* ref, size_t alloc_size) {  in AddLiveBytes() argument
  213   reg->AddLiveBytes(alloc_size);  in AddLiveBytes()
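
The region_space.h hits accumulate per-region live-byte counts during concurrent copying (called from concurrent_copying.cc line 976 above): the region containing a reference is located by pointer arithmetic on the fixed region size, then the object's aligned size is added to its counter for the space's live-data accounting. A sketch with an illustrative region size and an atomic counter; ART's actual kRegionSize and synchronization discipline differ:

    #include <atomic>
    #include <cstddef>
    #include <cstdint>

    constexpr size_t kRegionSize = 256 * 1024;  // illustrative; see RegionSpace::kRegionSize

    struct Region {
      std::atomic<size_t> live_bytes{0};
      void AddLiveBytes(size_t alloc_size) {
        live_bytes.fetch_add(alloc_size, std::memory_order_relaxed);
      }
    };

    struct RegionTable {
      uint8_t* space_begin;
      Region* regions;  // one entry per kRegionSize chunk of the space

      Region* RefToRegion(const void* ref) {
        const size_t offset = static_cast<const uint8_t*>(ref) - space_begin;
        return &regions[offset / kRegionSize];
      }

      // Mirrors region_space.h lines 211-213: resolve the region, then add
      // the object's aligned size to its live-byte count.
      void AddLiveBytes(const void* ref, size_t alloc_size) {
        RefToRegion(ref)->AddLiveBytes(alloc_size);
      }
    };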
|