Lines matching refs:alloc_size, in Heap::TryToAllocate() and Heap::IsOutOfMemoryOnAllocation() (Android ART garbage collector).

In Heap::TryToAllocate():

  // Signature fragment: the request size plus the out-parameters that every
  // allocator path fills in.
  ... size_t alloc_size, size_t* bytes_allocated, ...

  // Up-front OOM check before touching the space:
  ... UNLIKELY(IsOutOfMemoryOnAllocation<kGrow>(allocator_type, alloc_size))) {

  // Bump-pointer space: round the request up to the space's alignment,
  // bump-allocate, and report the request size in all three out-parameters.
  alloc_size = RoundUp(alloc_size, space::BumpPointerSpace::kAlignment);
  ret = bump_pointer_space_->AllocNonvirtual(alloc_size);
  ...
  *bytes_allocated = alloc_size;
  *usable_size = alloc_size;
  *bytes_tl_bulk_allocated = alloc_size;
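For context, the bump-pointer fast path amounts to an atomic advance of the space's end pointer. A minimal compilable sketch of that idea (all names here are hypothetical; ART's real space::BumpPointerSpace carries extra bookkeeping):

  #include <atomic>
  #include <cstddef>
  #include <cstdint>

  class BumpPointerSketch {
   public:
    BumpPointerSketch(uint8_t* begin, uint8_t* end) : end_(begin), limit_(end) {}

    // Returns nullptr when the space is exhausted, so the caller can fall
    // back to a slow path, mirroring AllocNonvirtual in the listing above.
    void* Alloc(size_t alloc_size) {
      uint8_t* old_end = end_.load(std::memory_order_relaxed);
      uint8_t* new_end;
      do {
        new_end = old_end + alloc_size;
        if (new_end > limit_) {
          return nullptr;
        }
        // On failure, compare_exchange_weak reloads old_end and we retry.
      } while (!end_.compare_exchange_weak(old_end, new_end,
                                           std::memory_order_relaxed));
      return old_end;
    }

   private:
    std::atomic<uint8_t*> end_;  // Current allocation frontier.
    uint8_t* const limit_;       // End of the mapped region.
  };

The CAS loop is what makes the bump safe for concurrent allocators without taking a lock.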
  // RosAlloc space: the OOM check is charged with the worst-case bulk the
  // request may pull from the space, not just alloc_size.
  size_t max_bytes_tl_bulk_allocated = rosalloc_space_->MaxBytesBulkAllocatedFor(alloc_size);
  ...
  ret = rosalloc_space_->Alloc(self, alloc_size, bytes_allocated, usable_size, ...);
  ...
  // Non-instrumented variant of the same pattern:
  ... rosalloc_space_->MaxBytesBulkAllocatedForNonvirtual(alloc_size);
  ...
  DCHECK(!rosalloc_space_->CanAllocThreadLocal(self, alloc_size));
  ...
  ret = rosalloc_space_->AllocNonvirtual(self, alloc_size, bytes_allocated, usable_size, ...);
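The MaxBytesBulkAllocatedFor() step matters: RosAlloc can map a whole new thread-local run of pages to serve a small request, so the OOM check must be charged with that worst case rather than with alloc_size itself. A self-contained sketch of the pattern (illustrative sizes and helpers, not ART's real interfaces):

  #include <cstddef>
  #include <cstdlib>

  constexpr size_t kRunSizeSketch = 4096;           // Hypothetical run size.
  constexpr size_t kFootprintLimitSketch = 1 << 20;
  size_t footprint_sketch = 0;                      // Bytes charged so far.

  // Worst case: a small request may force the space to map a whole new run.
  size_t MaxBytesBulkAllocatedForSketch(size_t alloc_size) {
    return alloc_size < kRunSizeSketch ? kRunSizeSketch : alloc_size;
  }

  bool IsOutOfMemoryOnAllocationSketch(size_t bytes) {
    return footprint_sketch + bytes > kFootprintLimitSketch;
  }

  void* RosAllocStyleAlloc(size_t alloc_size) {
    // Charge the OOM check with the worst-case bulk, mirroring the
    // MaxBytesBulkAllocatedFor() call in the listing above; otherwise a
    // 16-byte request could silently map a multi-KB run past the limit.
    const size_t worst_case = MaxBytesBulkAllocatedForSketch(alloc_size);
    if (IsOutOfMemoryOnAllocationSketch(worst_case)) {
      return nullptr;
    }
    footprint_sketch += worst_case;  // Stand-in for the real bulk allocation.
    return std::malloc(alloc_size);  // Stand-in for carving from the run.
  }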
  // DlMalloc space: virtual (instrumented) and nonvirtual (fast) entry points.
  ret = dlmalloc_space_->Alloc(self, alloc_size, bytes_allocated, usable_size, ...);
  ...
  ret = dlmalloc_space_->AllocNonvirtual(self, alloc_size, bytes_allocated, usable_size, ...);

  // Non-moving space:
  ret = non_moving_space_->Alloc(self, alloc_size, bytes_allocated, usable_size, ...);

  // Large object space:
  ret = large_object_space_->Alloc(self, alloc_size, bytes_allocated, usable_size, ...);
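The Alloc/AllocNonvirtual pairs above read as a devirtualization idiom: the instrumented build goes through the virtual entry point so wrappers can interpose, while the fast path calls a non-virtual method the compiler can inline. A generic sketch of that idiom, assuming nothing about ART's real class hierarchy:

  #include <cstddef>
  #include <cstdlib>

  struct MallocSpaceSketch {
    virtual ~MallocSpaceSketch() = default;

    // Instrumented path: virtual, so tooling subclasses can interpose.
    virtual void* Alloc(size_t size) { return AllocNonvirtual(size); }

    // Fast path: non-virtual and visible to the compiler for inlining.
    void* AllocNonvirtual(size_t size) { return std::malloc(size); }
  };

  template <bool kInstrumented>
  void* TryToAllocateSketch(MallocSpaceSketch* space, size_t size) {
    return kInstrumented ? space->Alloc(size)             // Virtual dispatch.
                         : space->AllocNonvirtual(size);  // Direct call.
  }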
  // TLAB: alloc_size must already be bump-pointer aligned here. If the
  // current TLAB cannot hold the request, grab a fresh TLAB sized for this
  // allocation plus headroom, then bump-allocate inside it.
  DCHECK_ALIGNED(alloc_size, space::BumpPointerSpace::kAlignment);
  if (UNLIKELY(self->TlabSize() < alloc_size)) {
    const size_t new_tlab_size = alloc_size + kDefaultTLABSize;
    ...
  ret = self->AllocTlab(alloc_size);
  ...
  *bytes_allocated = alloc_size;
  *usable_size = alloc_size;
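This is where *bytes_allocated and *bytes_tl_bulk_allocated diverge: the heap footprint is charged once when a TLAB is carved out, while each subsequent object only bumps a thread-local pointer. A compact sketch of the refill-then-bump logic, with hypothetical names and an illustrative headroom constant:

  #include <cstddef>
  #include <cstdint>

  constexpr size_t kDefaultTlabSizeSketch = 32 * 1024;  // Illustrative only.

  struct ThreadSketch {
    uint8_t* tlab_pos = nullptr;
    uint8_t* tlab_end = nullptr;

    size_t TlabSize() const { return static_cast<size_t>(tlab_end - tlab_pos); }

    void* AllocTlab(size_t alloc_size) {
      // Caller guarantees the TLAB has room, so this cannot fail.
      uint8_t* result = tlab_pos;
      tlab_pos += alloc_size;
      return result;
    }
  };

  void* TryTlabAllocSketch(ThreadSketch* self, size_t alloc_size,
                           uint8_t* (*grab_tlab)(size_t tlab_size)) {
    if (self->TlabSize() < alloc_size) {
      // Refill: ask the space for alloc_size plus headroom so the new TLAB
      // fits this request and leaves room for the ones that follow.
      const size_t new_tlab_size = alloc_size + kDefaultTlabSizeSketch;
      uint8_t* tlab = grab_tlab(new_tlab_size);
      if (tlab == nullptr) {
        return nullptr;  // Space exhausted; caller takes the slow path.
      }
      self->tlab_pos = tlab;
      self->tlab_end = tlab + new_tlab_size;
      // In ART, this refill is where *bytes_tl_bulk_allocated gets the full
      // TLAB size while *bytes_allocated stays equal to alloc_size.
    }
    return self->AllocTlab(alloc_size);
  }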
  // Region space: round the request up to the region space's alignment.
  alloc_size = RoundUp(alloc_size, space::RegionSpace::kAlignment);
  ret = region_space_->AllocNonvirtual<false>(alloc_size, bytes_allocated, usable_size, ...);

  // Region-space TLAB: when the current TLAB is too small, a request that
  // fits in a single region may refill the TLAB or fall back to a direct
  // region allocation; a larger request always allocates directly. The
  // fallbacks run their own OOM checks.
  DCHECK_ALIGNED(alloc_size, space::RegionSpace::kAlignment);
  if (UNLIKELY(self->TlabSize() < alloc_size)) {
    if (space::RegionSpace::kRegionSize >= alloc_size) {
      ...
      ret = region_space_->AllocNonvirtual<false>(alloc_size, bytes_allocated, usable_size, ...);
      ...
      if (!IsOutOfMemoryOnAllocation<kGrow>(allocator_type, alloc_size)) {
        ret = region_space_->AllocNonvirtual<false>(alloc_size, bytes_allocated, usable_size, ...);
      ...
    // Larger than a single region:
    if (LIKELY(!IsOutOfMemoryOnAllocation<kGrow>(allocator_type, alloc_size))) {
      ret = region_space_->AllocNonvirtual<false>(alloc_size, bytes_allocated, usable_size, ...);
  ...
  // The TLAB now has room: bump-allocate inside it.
  ret = self->AllocTlab(alloc_size);
  ...
  *bytes_allocated = alloc_size;
  *usable_size = alloc_size;
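Compared with the plain TLAB case, the extra branching here encodes one decision: only a request that fits in a single region justifies refilling the TLAB; anything larger goes straight to a region allocation. A sketch of just that decision logic, with the allocation itself elided and all names hypothetical:

  #include <cstddef>

  constexpr size_t kRegionSizeSketch = 256 * 1024;  // Illustrative region size.

  enum class RegionTlabDecision {
    kBumpInTlab,         // TLAB already has room: self->AllocTlab(alloc_size).
    kRefillTlab,         // Fits in a region: claim a new thread-local region.
    kDirectRegionAlloc,  // TLAB refill denied, or request larger than a region.
    kOutOfMemory,
  };

  RegionTlabDecision Decide(size_t tlab_remaining, size_t alloc_size,
                            bool oom_for_region, bool oom_for_request) {
    if (tlab_remaining >= alloc_size) {
      return RegionTlabDecision::kBumpInTlab;
    }
    if (kRegionSizeSketch >= alloc_size) {
      if (!oom_for_region) {
        return RegionTlabDecision::kRefillTlab;
      }
      return oom_for_request ? RegionTlabDecision::kOutOfMemory
                             : RegionTlabDecision::kDirectRegionAlloc;
    }
    // Larger than a region: bypass the TLAB entirely.
    return oom_for_request ? RegionTlabDecision::kOutOfMemory
                           : RegionTlabDecision::kDirectRegionAlloc;
  }

In the listing above, both direct-allocation arms surface as the AllocNonvirtual<false> calls guarded by IsOutOfMemoryOnAllocation.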
In Heap::IsOutOfMemoryOnAllocation():

  inline bool Heap::IsOutOfMemoryOnAllocation(AllocatorType allocator_type, size_t alloc_size) {
    // Prospective footprint if this allocation were charged right now:
    size_t new_footprint = num_bytes_allocated_.LoadSequentiallyConsistent() + alloc_size;
    ...
        << PrettySize(new_footprint) << " for a " << PrettySize(alloc_size) << " allocation";
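The check itself is a footprint comparison: prospective usage is the bytes already allocated plus this request, tested against the currently allowed footprint. A minimal sketch with assumed field names (real ART distinguishes a soft target footprint from the hard growth limit and can grow the heap when kGrow is set):

  #include <atomic>
  #include <cstddef>

  // Assumed fields, modeled on the fragment above; illustrative values.
  std::atomic<size_t> num_bytes_allocated_sketch{0};
  size_t max_allowed_footprint_sketch = 64 << 20;  // Hypothetical 64 MiB cap.

  template <bool kGrow>
  bool IsOutOfMemoryOnAllocationSketch(size_t alloc_size) {
    size_t new_footprint =
        num_bytes_allocated_sketch.load(std::memory_order_seq_cst) + alloc_size;
    if (new_footprint <= max_allowed_footprint_sketch) {
      return false;  // Fits under the current limit.
    }
    if (kGrow) {
      // A growing allocator could raise the footprint limit here, up to a
      // hard cap, instead of failing; elided in this sketch.
    }
    return true;  // Would exceed the allowed footprint: report OOM.
  }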