/art/runtime/gc/
D | heap-inl.h |
      300  size_t alloc_size,                                                                           in TryToAllocate() argument
      307  UNLIKELY(IsOutOfMemoryOnAllocation(allocator_type, alloc_size, kGrow))) {                    in TryToAllocate()
      314  alloc_size = RoundUp(alloc_size, space::BumpPointerSpace::kAlignment);                       in TryToAllocate()
      315  ret = bump_pointer_space_->AllocNonvirtual(alloc_size);                                      in TryToAllocate()
      317  *bytes_allocated = alloc_size;                                                               in TryToAllocate()
      318  *usable_size = alloc_size;                                                                   in TryToAllocate()
      319  *bytes_tl_bulk_allocated = alloc_size;                                                       in TryToAllocate()
      326  size_t max_bytes_tl_bulk_allocated = rosalloc_space_->MaxBytesBulkAllocatedFor(alloc_size);  in TryToAllocate()
      332  ret = rosalloc_space_->Alloc(self, alloc_size, bytes_allocated, usable_size,                 in TryToAllocate()
      337  rosalloc_space_->MaxBytesBulkAllocatedForNonvirtual(alloc_size);                             in TryToAllocate()
      [all …]
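The TryToAllocate matches above show the bump-pointer fast path: the request is rounded up to the space's alignment (line 314), carved off the front of the space with a pointer increment (line 315), and all three out-parameters receive the rounded size (lines 317-319). A minimal standalone sketch of that pattern; the class below is a simplified stand-in, not ART's real space::BumpPointerSpace, and kAlignment is an assumed value.

```cpp
#include <cstddef>
#include <cstdint>
#include <cstdio>

// Round n up to the next multiple of a power-of-two alignment,
// mirroring the RoundUp(alloc_size, kAlignment) call at line 314.
static size_t RoundUp(size_t n, size_t alignment) {
  return (n + alignment - 1) & ~(alignment - 1);
}

// Hypothetical stand-in for space::BumpPointerSpace: allocation is a
// single pointer increment, so the rounded size is also the usable and
// bulk-allocated size.
class BumpPointerSpace {
 public:
  static constexpr size_t kAlignment = 8;  // assumed value

  BumpPointerSpace(uint8_t* begin, size_t capacity)
      : pos_(begin), end_(begin + capacity) {}

  void* Alloc(size_t alloc_size, size_t* bytes_allocated,
              size_t* usable_size, size_t* bytes_tl_bulk_allocated) {
    alloc_size = RoundUp(alloc_size, kAlignment);
    if (static_cast<size_t>(end_ - pos_) < alloc_size) {
      return nullptr;  // the real code checks IsOutOfMemoryOnAllocation first
    }
    void* ret = pos_;
    pos_ += alloc_size;
    // As at lines 317-319: all three out-parameters get the rounded size.
    *bytes_allocated = alloc_size;
    *usable_size = alloc_size;
    *bytes_tl_bulk_allocated = alloc_size;
    return ret;
  }

 private:
  uint8_t* pos_;
  uint8_t* end_;
};

int main() {
  alignas(8) static uint8_t backing[1024];
  BumpPointerSpace space(backing, sizeof(backing));
  size_t allocated, usable, bulk;
  void* obj = space.Alloc(30, &allocated, &usable, &bulk);
  std::printf("obj=%p bytes_allocated=%zu\n", obj, allocated);  // 30 -> 32
  return obj != nullptr ? 0 : 1;
}
```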
D | heap.cc |
     1799  size_t alloc_size,                                                                   in AllocateInternalWithGc() argument
     1817  l->PreObjectAllocated(self, h_klass, &alloc_size);                                   in AllocateInternalWithGc()
     1842  mirror::Object* ptr = TryToAllocate<true, false>(self, allocator, alloc_size, bytes_allocated,  in AllocateInternalWithGc()
     1873  alloc_size, bytes_allocated,                                                         in AllocateInternalWithGc()
     1884  VLOG(gc) << "Forcing collection of SoftReferences for " << PrettySize(alloc_size)    in AllocateInternalWithGc()
     1899  ptr = TryToAllocate<true, true>(self, allocator, alloc_size, bytes_allocated,        in AllocateInternalWithGc()
     1923  ptr = TryToAllocate<true, true>(self, allocator, alloc_size, bytes_allocated,        in AllocateInternalWithGc()
     1963  ThrowOutOfMemoryError(self, alloc_size, allocator);                                  in AllocateInternalWithGc()
     2241  size_t alloc_size = RoundUp(obj_size, kObjectAlignment);                             in MarkNonForwardedObject() local
     2244  auto it = bins_.lower_bound(alloc_size);                                             in MarkNonForwardedObject()
     [all …]
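AllocateInternalWithGc (lines 1799-1963 above) retries the allocation around progressively heavier collections, forces SoftReference clearing as a last resort (line 1884), and throws OutOfMemoryError only when everything fails (line 1963); lines 2241-2244 separately show MarkNonForwardedObject reusing freed space through a size-keyed bins_.lower_bound lookup. A hedged sketch of the escalation loop's shape; the GcType names and exact escalation order are assumptions modeled on the listing, not ART's precise sequence.

```cpp
#include <cstddef>
#include <cstdlib>
#include <functional>

enum class GcType { kSticky, kPartial, kFull };  // assumed escalation order

using TryAlloc = std::function<void*(size_t alloc_size, bool grow)>;
using RunGc = std::function<void(GcType type, bool clear_soft_references)>;

// Sketch of the shape of AllocateInternalWithGc: try, collect harder and
// retry (TryToAllocate<true, true> at lines 1899 and 1923), clear
// SoftReferences on the final attempt (line 1884), and report OOM only
// after everything has failed (line 1963).
void* AllocateWithGcSketch(size_t alloc_size, const TryAlloc& try_alloc,
                           const RunGc& run_gc) {
  if (void* ptr = try_alloc(alloc_size, /*grow=*/false)) return ptr;
  for (GcType type : {GcType::kSticky, GcType::kPartial, GcType::kFull}) {
    run_gc(type, /*clear_soft_references=*/false);
    if (void* ptr = try_alloc(alloc_size, /*grow=*/true)) return ptr;
  }
  run_gc(GcType::kFull, /*clear_soft_references=*/true);  // last resort
  if (void* ptr = try_alloc(alloc_size, /*grow=*/true)) return ptr;
  return nullptr;  // caller would ThrowOutOfMemoryError(self, alloc_size, ...)
}

int main() {
  bool collected = false;
  auto try_alloc = [&](size_t size, bool) -> void* {
    return collected ? std::malloc(size) : nullptr;  // succeeds after one GC
  };
  auto run_gc = [&](GcType, bool) { collected = true; };
  void* p = AllocateWithGcSketch(64, try_alloc, run_gc);
  std::free(p);
  return p != nullptr ? 0 : 1;
}
```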
D | heap.h |
      875  size_t alloc_size);
      880  size_t alloc_size,
     1080  size_t alloc_size,
     1088  size_t alloc_size,
     1104  size_t alloc_size,
     1259  static void VlogHeapGrowth(size_t max_allowed_footprint, size_t new_footprint, size_t alloc_size);
/art/runtime/javaheapprof/
D | javaheapsampler.cc |
       77  size_t HeapSampler::GetSampleOffset(size_t alloc_size,                 in GetSampleOffset() argument
       81  size_t exhausted_size = alloc_size + tlab_used;                        in GetSampleOffset()
      110  size_t next_bytes_until_sample = bytes_until_sample - alloc_size;      in GetSampleOffset()
      113  << next_bytes_until_sample << " alloc= " << alloc_size;                in GetSampleOffset()
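GetSampleOffset decides whether an allocation crosses the next sampling point: line 81 folds the bytes already used in the TLAB into the computation, and line 110 shows the countdown shrinking by alloc_size when no sample is taken. A simplified sketch of that bookkeeping; the tlab_used handling and the random re-draw of the next sampling interval are elided, so treat this as an assumption-laden illustration rather than the real HeapSampler logic.

```cpp
#include <cstddef>
#include <cstdio>

// Assumption-laden sketch: a sample point sits *bytes_until_sample bytes
// ahead of the current position. If the allocation reaches it, the sample
// lands inside the allocation at that offset; otherwise the countdown
// shrinks by alloc_size, matching line 110 in the listing.
long GetSampleOffsetSketch(size_t alloc_size, size_t* bytes_until_sample) {
  if (alloc_size >= *bytes_until_sample) {
    long offset = static_cast<long>(*bytes_until_sample);
    // The real code would reset *bytes_until_sample to a freshly drawn
    // sampling interval at this point.
    return offset;  // sample this allocation at `offset`
  }
  *bytes_until_sample -= alloc_size;
  return -1;  // no sample point inside this allocation
}

int main() {
  size_t countdown = 100;
  std::printf("%ld\n", GetSampleOffsetSketch(64, &countdown));  // -1 (countdown -> 36)
  std::printf("%ld\n", GetSampleOffsetSketch(64, &countdown));  // 36: sampled
  return 0;
}
```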
D | javaheapsampler.h |
       63  size_t GetSampleOffset(size_t alloc_size,
/art/runtime/gc/space/
D | space_test.h |
      180  size_t alloc_size;                                                      in SizeFootPrintGrowthLimitAndTrimBody() local
      182  alloc_size = object_size;                                               in SizeFootPrintGrowthLimitAndTrimBody()
      184  alloc_size = test_rand(&rand_seed) % static_cast<size_t>(-object_size); in SizeFootPrintGrowthLimitAndTrimBody()
      187  if (alloc_size < size_of_zero_length_byte_array) {                      in SizeFootPrintGrowthLimitAndTrimBody()
      188  alloc_size = size_of_zero_length_byte_array;                            in SizeFootPrintGrowthLimitAndTrimBody()
      196  object.Assign(Alloc(space, self, alloc_size, &bytes_allocated, nullptr, in SizeFootPrintGrowthLimitAndTrimBody()
      199  object.Assign(AllocWithGrowth(space, self, alloc_size, &bytes_allocated, nullptr,  in SizeFootPrintGrowthLimitAndTrimBody()
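In this test body, a negative object_size means "pick a random size up to |object_size|", clamped so the result can still hold a zero-length byte array, the smallest allocatable object. A self-contained restatement of lines 180-188; the LCG standing in for the test's own test_rand is an assumption.

```cpp
#include <cstddef>
#include <cstdint>

// Stand-in for the test's PRNG; the real test_rand may differ.
static size_t test_rand(size_t* seed) {
  *seed = *seed * 1103515245u + 12345u;
  return *seed;
}

// Restatement of lines 180-188: a positive object_size is used as-is; a
// negative one means "random size below |object_size|", clamped up to the
// size of a zero-length byte array.
size_t PickAllocSize(intptr_t object_size, size_t* rand_seed,
                     size_t size_of_zero_length_byte_array) {
  size_t alloc_size;
  if (object_size > 0) {
    alloc_size = static_cast<size_t>(object_size);
  } else {
    alloc_size = test_rand(rand_seed) % static_cast<size_t>(-object_size);
    if (alloc_size < size_of_zero_length_byte_array) {
      alloc_size = size_of_zero_length_byte_array;
    }
  }
  return alloc_size;
}

int main() {
  size_t seed = 42;
  size_t n = PickAllocSize(/*object_size=*/-2048, &seed, /*min=*/12);
  return (n >= 12 && n < 2048) ? 0 : 1;
}
```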
D | large_object_space.cc |
      212  size_t alloc_size = it->second.mem_map.BaseSize();   in AllocationSize() local
      214  *usable_size = alloc_size;                           in AllocationSize()
      216  return alloc_size;                                   in AllocationSize()
      402  size_t alloc_size = cur_info->ByteSize();            in Walk() local
      404  uint8_t* byte_end = byte_start + alloc_size;         in Walk()
      405  callback(byte_start, byte_end, alloc_size, arg);     in Walk()
      493  size_t alloc_size = info->ByteSize();                in AllocationSize() local
      495  *usable_size = alloc_size;                           in AllocationSize()
      497  return alloc_size;                                   in AllocationSize()
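Both AllocationSize overloads above are lookups into per-object records: each large object is backed by its own mapping, and the recorded size doubles as the usable size (lines 212-216 and 493-497), while Walk iterates the records to hand [byte_start, byte_end) ranges to a callback. A sketch of that bookkeeping; Object and AllocRecord are illustrative stand-ins for ART's mirror::Object and the per-allocation mem_map.

```cpp
#include <cstddef>
#include <map>

// Illustrative stand-ins for ART's mirror::Object and the per-allocation
// record holding the backing mapping's size.
struct Object {};

struct AllocRecord {
  size_t base_size;  // plays the role of mem_map.BaseSize() at line 212
};

class LargeObjectMapSketch {
 public:
  void Record(const Object* obj, size_t base_size) {
    allocs_[obj] = AllocRecord{base_size};
  }

  // Mirrors lines 212-216 / 493-497: look up the object, report its
  // recorded size, and reuse it as the usable size.
  size_t AllocationSize(const Object* obj, size_t* usable_size) const {
    auto it = allocs_.find(obj);
    if (it == allocs_.end()) return 0;  // the real code asserts instead
    size_t alloc_size = it->second.base_size;
    if (usable_size != nullptr) {
      *usable_size = alloc_size;
    }
    return alloc_size;
  }

 private:
  std::map<const Object*, AllocRecord> allocs_;
};

int main() {
  LargeObjectMapSketch space;
  Object obj;
  space.Record(&obj, 4096);
  size_t usable = 0;
  return space.AllocationSize(&obj, &usable) == 4096 ? 0 : 1;
}
```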
D | large_object_space_test.cc |
      133  size_t alloc_size, bytes_tl_bulk_allocated;                            in Run() local
      134  mirror::Object* ptr = los_->Alloc(self, size_, &alloc_size, nullptr,   in Run()
D | region_space.h |
      319  void AddLiveBytes(mirror::Object* ref, size_t alloc_size) {   in AddLiveBytes() argument
      321  reg->AddLiveBytes(alloc_size);                                 in AddLiveBytes()
D | region_space.cc |
      663  size_t alloc_size = RoundUp(obj_size, space::RegionSpace::kAlignment);  in CheckLiveBytesAgainstRegionBitmap() local
      664  live_bytes_recount += alloc_size;                                       in CheckLiveBytesAgainstRegionBitmap()
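region_space.h and region_space.cc together show the two sides of live-byte accounting: AddLiveBytes maps a reference to its region and bumps that region's counter (lines 319-321), while CheckLiveBytesAgainstRegionBitmap recounts by summing RoundUp(obj_size, kAlignment) over marked objects (lines 663-664) and compares. A sketch under assumed constants; the region size, alignment, and field names are modeled on the listing, not copied from ART.

```cpp
#include <cstddef>
#include <cstdint>
#include <vector>

// Sketch of region live-byte accounting: the space is an array of
// fixed-size regions, a reference's region index falls out of pointer
// arithmetic, and marking adds the object's aligned size to that
// region's counter. kRegionSize and kAlignment are assumed values.
class RegionSpaceSketch {
 public:
  static constexpr size_t kRegionSize = 256 * 1024;
  static constexpr size_t kAlignment = 8;

  RegionSpaceSketch(uint8_t* base, size_t num_regions)
      : base_(base), live_bytes_(num_regions, 0) {}

  // Mirrors AddLiveBytes at lines 319-321 of region_space.h.
  void AddLiveBytes(const void* ref, size_t alloc_size) {
    size_t idx = (static_cast<const uint8_t*>(ref) - base_) / kRegionSize;
    live_bytes_[idx] += alloc_size;
  }

  // The recount at lines 663-664 sums this over every marked object and
  // checks it against the counters maintained above.
  static size_t AlignedSize(size_t obj_size) {
    return (obj_size + kAlignment - 1) & ~(kAlignment - 1);
  }

  size_t LiveBytes(size_t idx) const { return live_bytes_[idx]; }

 private:
  uint8_t* base_;
  std::vector<size_t> live_bytes_;
};

int main() {
  static uint8_t base[2 * RegionSpaceSketch::kRegionSize];
  RegionSpaceSketch space(base, /*num_regions=*/2);
  space.AddLiveBytes(base + RegionSpaceSketch::kRegionSize + 64,
                     RegionSpaceSketch::AlignedSize(30));  // region 1, 32 bytes
  return space.LiveBytes(1) == 32 ? 0 : 1;
}
```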
/art/runtime/mirror/
D | string-alloc-inl.h |
      174  size_t alloc_size = RoundUp(size, kObjectAlignment);   in Alloc() local
      197  alloc_size,                                            in Alloc()
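String::Alloc computes the object's byte size and rounds it up to kObjectAlignment (line 174) before passing it to the allocator (line 197). A hedged sketch of that computation; the 16-byte header and the compressed (8-bit) vs. UTF-16 payload split are illustrative assumptions, not ART's exact String layout.

```cpp
#include <cstddef>

constexpr size_t kObjectAlignment = 8;  // assumed value

// Hedged sketch of the computation at line 174: header plus character
// payload, rounded up to the object alignment.
size_t StringAllocSize(size_t length, bool compressed) {
  constexpr size_t kHeaderSize = 16;      // hypothetical header + length field
  size_t char_size = compressed ? 1 : 2;  // bytes per character
  size_t size = kHeaderSize + length * char_size;
  return (size + kObjectAlignment - 1) & ~(kObjectAlignment - 1);  // RoundUp
}

int main() {
  return StringAllocSize(5, /*compressed=*/true) == 24 ? 0 : 1;  // 21 -> 24
}
```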
/art/runtime/gc/collector/
D | concurrent_copying.cc |
     1178  size_t alloc_size = RoundUp(obj_size, space::RegionSpace::kAlignment);   in AddLiveBytesAndScanRef() local
     1179  region_space_->AddLiveBytes(ref, alloc_size);                            in AddLiveBytesAndScanRef()
     3361  mirror::Object* ConcurrentCopying::AllocateInSkippedBlock(Thread* const self, size_t alloc_size) {  in AllocateInSkippedBlock() argument
     3363  CHECK_ALIGNED(alloc_size, space::RegionSpace::kAlignment);               in AllocateInSkippedBlock()
     3369  auto it = skipped_blocks_map_.lower_bound(alloc_size);                   in AllocateInSkippedBlock()
     3375  CHECK_GE(byte_size, alloc_size);                                         in AllocateInSkippedBlock()
     3376  if (byte_size > alloc_size && byte_size - alloc_size < min_object_size) {  in AllocateInSkippedBlock()
     3378  it = skipped_blocks_map_.lower_bound(alloc_size + min_object_size);      in AllocateInSkippedBlock()
     3383  CHECK_ALIGNED(it->first - alloc_size, space::RegionSpace::kAlignment);   in AllocateInSkippedBlock()
     3384  CHECK_GE(it->first - alloc_size, min_object_size)                        in AllocateInSkippedBlock()
     [all …]
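AllocateInSkippedBlock is a best-fit search over a size-keyed multimap of blocks skipped during copying: lower_bound finds the smallest block that fits (line 3369), and if splitting it would leave a fragment smaller than the minimum object size, the search is redone for alloc_size + min_object_size (lines 3376-3378) so the remainder stays usable. A sketch of that policy; the alignment and minimum-object-size constants are assumptions, and ART fills the remainder with a dummy object rather than simply reinserting it as done here.

```cpp
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <map>

// Best-fit search over size-keyed free blocks, modeled on the listing.
// kAlignment and kMinObjectSize are assumed constants.
class SkippedBlocksSketch {
 public:
  static constexpr size_t kAlignment = 16;
  static constexpr size_t kMinObjectSize = 16;

  void AddBlock(uint8_t* addr, size_t byte_size) {
    blocks_.emplace(byte_size, addr);
  }

  uint8_t* Alloc(size_t alloc_size) {
    assert(alloc_size % kAlignment == 0);       // CHECK_ALIGNED at line 3363
    auto it = blocks_.lower_bound(alloc_size);  // smallest block that fits
    if (it == blocks_.end()) return nullptr;
    size_t byte_size = it->first;
    if (byte_size > alloc_size && byte_size - alloc_size < kMinObjectSize) {
      // Splitting would leave an unusable fragment; redo the search for a
      // block whose remainder can hold a minimum-size object (line 3378).
      it = blocks_.lower_bound(alloc_size + kMinObjectSize);
      if (it == blocks_.end()) return nullptr;
      byte_size = it->first;
    }
    uint8_t* addr = it->second;
    blocks_.erase(it);
    if (byte_size > alloc_size) {
      // ART fills the tail with a dummy object before reinserting it;
      // simply returning it to the map is a simplification.
      blocks_.emplace(byte_size - alloc_size, addr + alloc_size);
    }
    return addr;
  }

 private:
  std::multimap<size_t, uint8_t*> blocks_;
};

int main() {
  static uint8_t pool[256];
  SkippedBlocksSketch blocks;
  blocks.AddBlock(pool, 64);
  uint8_t* p = blocks.Alloc(48);  // leftover 16 == kMinObjectSize: reusable
  return p == pool ? 0 : 1;
}
```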
D | concurrent_copying.h |
      271  mirror::Object* AllocateInSkippedBlock(Thread* const self, size_t alloc_size)