
Searched refs:usable_size (Results 1 – 25 of 25) sorted by relevance

/art/runtime/gc/space/
bump_pointer_space-inl.h
27 size_t* usable_size) { in Alloc() argument
32 if (usable_size != nullptr) { in Alloc()
33 *usable_size = num_bytes; in Alloc()
41 size_t* usable_size) { in AllocThreadUnsafe() argument
54 if (UNLIKELY(usable_size != nullptr)) { in AllocThreadUnsafe()
55 *usable_size = num_bytes; in AllocThreadUnsafe()
84 inline size_t BumpPointerSpace::AllocationSizeNonvirtual(mirror::Object* obj, size_t* usable_size) in AllocationSizeNonvirtual() argument
87 if (usable_size != nullptr) { in AllocationSizeNonvirtual()
88 *usable_size = RoundUp(num_bytes, kAlignment); in AllocationSizeNonvirtual()
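
A minimal sketch of the out-parameter pattern these bump_pointer_space-inl.h hits show: the allocator reports the size it actually granted through *usable_size, rounded up to its alignment. Everything below (kAlignment, RoundUp, the class itself) is an illustrative stand-in, not ART's code, and only the thread-unsafe path is sketched.

#include <cstddef>
#include <cstdint>

namespace sketch {

constexpr size_t kAlignment = 8;  // assumed object alignment, power of two

inline size_t RoundUp(size_t x, size_t n) {
  return (x + n - 1) & ~(n - 1);
}

class BumpPointerSpaceSketch {
 public:
  BumpPointerSpaceSketch(uint8_t* begin, uint8_t* end) : pos_(begin), end_(end) {}

  // Shape of the hits above: bytes_allocated and usable_size are optional
  // out-parameters describing what the allocator really handed back.
  void* AllocThreadUnsafe(size_t num_bytes, size_t* bytes_allocated, size_t* usable_size) {
    const size_t aligned = RoundUp(num_bytes, kAlignment);
    if (pos_ + aligned > end_) {
      return nullptr;  // space exhausted
    }
    void* obj = pos_;
    pos_ += aligned;
    if (bytes_allocated != nullptr) {
      *bytes_allocated = aligned;
    }
    if (usable_size != nullptr) {
      *usable_size = aligned;  // rounded-up size, as AllocationSizeNonvirtual() reports it
    }
    return obj;
  }

 private:
  uint8_t* pos_;
  uint8_t* end_;
};

}  // namespace sketch
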
dlmalloc_space-inl.h
30 size_t* usable_size) { in AllocNonvirtual() argument
34 obj = AllocWithoutGrowthLocked(self, num_bytes, bytes_allocated, usable_size); in AllocNonvirtual()
43 inline size_t DlMallocSpace::AllocationSizeNonvirtual(mirror::Object* obj, size_t* usable_size) { in AllocationSizeNonvirtual() argument
46 if (usable_size != nullptr) { in AllocationSizeNonvirtual()
47 *usable_size = size; in AllocationSizeNonvirtual()
54 size_t* usable_size) { in AllocWithoutGrowthLocked() argument
61 size_t allocation_size = AllocationSizeNonvirtual(result, usable_size); in AllocWithoutGrowthLocked()
rosalloc_space.h
50 size_t* usable_size) OVERRIDE LOCKS_EXCLUDED(lock_);
52 size_t* usable_size) OVERRIDE { in Alloc() argument
53 return AllocNonvirtual(self, num_bytes, bytes_allocated, usable_size); in Alloc()
56 size_t* usable_size) in AllocThreadUnsafe() argument
58 return AllocNonvirtualThreadUnsafe(self, num_bytes, bytes_allocated, usable_size); in AllocThreadUnsafe()
60 size_t AllocationSize(mirror::Object* obj, size_t* usable_size) OVERRIDE { in AllocationSize() argument
61 return AllocationSizeNonvirtual(obj, usable_size); in AllocationSize()
69 size_t* usable_size) { in AllocNonvirtual() argument
71 return AllocCommon(self, num_bytes, bytes_allocated, usable_size); in AllocNonvirtual()
74 size_t* bytes_allocated, size_t* usable_size) { in AllocNonvirtualThreadUnsafe() argument
[all …]
rosalloc_space-inl.h
28 inline size_t RosAllocSpace::AllocationSizeNonvirtual(mirror::Object* obj, size_t* usable_size) { in AllocationSizeNonvirtual() argument
43 if (usable_size != nullptr) { in AllocationSizeNonvirtual()
44 *usable_size = size_by_size; in AllocationSizeNonvirtual()
51 size_t* bytes_allocated, size_t* usable_size) { in AllocCommon() argument
66 if (usable_size != nullptr) { in AllocCommon()
67 *usable_size = rosalloc_size; in AllocCommon()
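
The dlmalloc and rosalloc hits above share one idea: *usable_size reports how large the block really is according to the backing allocator's size classes, so it can exceed the requested num_bytes. A rough sketch under that assumption, with glibc/bionic's malloc_usable_size() standing in for dlmalloc's usable size and rosalloc's bracket size; the class and its names are illustrative, not ART's.

#include <cstddef>
#include <cstdlib>
#include <malloc.h>  // malloc_usable_size() (glibc/bionic)

class MallocBackedSpaceSketch {
 public:
  void* Alloc(size_t num_bytes, size_t* bytes_allocated, size_t* usable_size) {
    void* obj = std::malloc(num_bytes);
    if (obj == nullptr) {
      return nullptr;
    }
    const size_t granted = malloc_usable_size(obj);  // >= num_bytes, per size class
    if (bytes_allocated != nullptr) {
      *bytes_allocated = granted;
    }
    if (usable_size != nullptr) {
      *usable_size = granted;
    }
    return obj;
  }

  // Mirrors AllocationSize()/AllocationSizeNonvirtual(): query the size of an
  // existing allocation and optionally report it through the out-parameter.
  size_t AllocationSize(void* obj, size_t* usable_size) {
    const size_t size = malloc_usable_size(obj);
    if (usable_size != nullptr) {
      *usable_size = size;
    }
    return size;
  }

  void Free(void* obj) { std::free(obj); }
};
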
valgrind_malloc_space-inl.h
35 size_t* usable_size) { in AllocWithGrowth() argument
37 bytes_allocated, usable_size); in AllocWithGrowth()
52 size_t* usable_size) { in Alloc() argument
54 usable_size); in Alloc()
67 size_t ValgrindMallocSpace<S, A>::AllocationSize(mirror::Object* obj, size_t* usable_size) { in AllocationSize() argument
69 reinterpret_cast<byte*>(obj) - kValgrindRedZoneBytes), usable_size); in AllocationSize()
78 size_t usable_size = 0; in Free() local
79 AllocationSize(ptr, &usable_size); in Free()
80 VALGRIND_MAKE_MEM_UNDEFINED(obj_with_rdz, usable_size); in Free()
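
The valgrind_malloc_space hits revolve around redzone bookkeeping: the pointer handed out sits one redzone past the real allocation, so AllocationSize() shifts it back before querying the wrapped space, and Free() first asks for the usable size so the region can be reported to Valgrind. A sketch of that shape under assumed names (kRedZoneBytes, RedZoneSpaceSketch, the wrapped space's interface); the real wrapper also invokes the VALGRIND_* client macros, omitted here.

#include <cstddef>
#include <cstdint>

constexpr size_t kRedZoneBytes = 8;  // assumed redzone width

template <typename UnderlyingSpace>
class RedZoneSpaceSketch {
 public:
  explicit RedZoneSpaceSketch(UnderlyingSpace* space) : space_(space) {}

  void* Alloc(size_t num_bytes, size_t* bytes_allocated, size_t* usable_size) {
    // Over-allocate so a leading and a trailing redzone bracket the object.
    uint8_t* base = static_cast<uint8_t*>(
        space_->Alloc(num_bytes + 2 * kRedZoneBytes, bytes_allocated, usable_size));
    if (base == nullptr) {
      return nullptr;
    }
    if (usable_size != nullptr) {
      *usable_size = num_bytes;  // the redzones shrink the usable size
    }
    return base + kRedZoneBytes;
  }

  size_t AllocationSize(void* obj, size_t* usable_size) {
    // Undo the redzone offset before asking the wrapped space, as the
    // AllocationSize() hit above does.
    uint8_t* base = static_cast<uint8_t*>(obj) - kRedZoneBytes;
    return space_->AllocationSize(base, usable_size);
  }

  void Free(void* obj) {
    size_t usable_size = 0;
    AllocationSize(obj, &usable_size);
    // The real wrapper hands the region and this usable_size to
    // VALGRIND_MAKE_MEM_UNDEFINED here; this sketch only releases the block.
    space_->Free(static_cast<uint8_t*>(obj) - kRedZoneBytes);
  }

 private:
  UnderlyingSpace* space_;
};

Wrapping the earlier sketch, RedZoneSpaceSketch<MallocBackedSpaceSketch> keeps the same Alloc/AllocationSize/Free surface as the space it decorates.
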
dlmalloc_space.h
51 size_t* usable_size) OVERRIDE LOCKS_EXCLUDED(lock_);
54 size_t* usable_size) OVERRIDE LOCKS_EXCLUDED(lock_) { in Alloc() argument
55 return AllocNonvirtual(self, num_bytes, bytes_allocated, usable_size); in Alloc()
58 virtual size_t AllocationSize(mirror::Object* obj, size_t* usable_size) OVERRIDE { in AllocationSize() argument
59 return AllocationSizeNonvirtual(obj, usable_size); in AllocationSize()
78 size_t* usable_size) LOCKS_EXCLUDED(lock_);
81 size_t AllocationSizeNonvirtual(mirror::Object* obj, size_t* usable_size);
137 size_t* usable_size)
large_object_space.cc
41 size_t* usable_size) OVERRIDE { in Alloc() argument
44 usable_size); in Alloc()
50 if (usable_size != nullptr) { in Alloc()
51 *usable_size = num_bytes; // Since we have redzones, shrink the usable size. in Alloc()
56 virtual size_t AllocationSize(mirror::Object* obj, size_t* usable_size) OVERRIDE { in AllocationSize() argument
59 return LargeObjectMapSpace::AllocationSize(object_with_rdz, usable_size); in AllocationSize()
111 size_t* bytes_allocated, size_t* usable_size) { in Alloc() argument
131 if (usable_size != nullptr) { in Alloc()
132 *usable_size = allocation_size; in Alloc()
157 size_t LargeObjectMapSpace::AllocationSize(mirror::Object* obj, size_t* usable_size) { in AllocationSize() argument
[all …]
valgrind_malloc_space.h
34 size_t* usable_size) OVERRIDE;
36 size_t* usable_size) OVERRIDE;
38 size_t AllocationSize(mirror::Object* obj, size_t* usable_size) OVERRIDE;
bump_pointer_space.h
50 size_t* usable_size) OVERRIDE;
53 size_t* usable_size)
60 size_t AllocationSize(mirror::Object* obj, size_t* usable_size) OVERRIDE in AllocationSize() argument
62 return AllocationSizeNonvirtual(obj, usable_size); in AllocationSize()
74 size_t AllocationSizeNonvirtual(mirror::Object* obj, size_t* usable_size)
large_object_space.h
119 size_t AllocationSize(mirror::Object* obj, size_t* usable_size);
121 size_t* usable_size);
147 size_t AllocationSize(mirror::Object* obj, size_t* usable_size) OVERRIDE
150 size_t* usable_size) OVERRIDE;
zygote_space.h
49 size_t* usable_size) OVERRIDE;
51 size_t AllocationSize(mirror::Object* obj, size_t* usable_size) OVERRIDE;
malloc_space.h
58 size_t* bytes_allocated, size_t* usable_size) = 0;
61 size_t* usable_size) = 0;
64 virtual size_t AllocationSize(mirror::Object* obj, size_t* usable_size) = 0;
space_test.h
64 size_t* bytes_allocated, size_t* usable_size) in Alloc() argument
68 mirror::Object* obj = alloc_space->Alloc(self, bytes, bytes_allocated, usable_size); in Alloc()
76 size_t* bytes_allocated, size_t* usable_size) in AllocWithGrowth() argument
80 mirror::Object* obj = alloc_space->AllocWithGrowth(self, bytes, bytes_allocated, usable_size); in AllocWithGrowth()
348 size_t allocation_size, usable_size; in AllocAndFreeListTestBody() local
351 &usable_size); in AllocAndFreeListTestBody()
355 EXPECT_EQ(usable_size, computed_usable_size); in AllocAndFreeListTestBody()
363 size_t allocation_size, usable_size; in AllocAndFreeListTestBody() local
364 lots_of_objects[i] = AllocWithGrowth(space, self, 1024, &allocation_size, &usable_size); in AllocAndFreeListTestBody()
368 EXPECT_EQ(usable_size, computed_usable_size); in AllocAndFreeListTestBody()
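
A caller-side sketch of the contract the space_test.h hits exercise: whatever Alloc() reports through *usable_size should agree with what AllocationSize() later computes for the same object. It reuses the illustrative MallocBackedSpaceSketch from above and uses plain assert() where the test uses EXPECT_EQ.

#include <cassert>
#include <cstddef>

void AllocUsableSizeRoundTrip(MallocBackedSpaceSketch* space) {
  size_t allocation_size = 0;
  size_t usable_size = 0;
  void* obj = space->Alloc(1024, &allocation_size, &usable_size);
  assert(obj != nullptr);
  assert(usable_size >= 1024);                  // at least what was requested
  size_t computed_usable_size = 0;
  space->AllocationSize(obj, &computed_usable_size);
  assert(usable_size == computed_usable_size);  // cf. EXPECT_EQ in the hits above
  space->Free(obj);
}
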
space.h
200 size_t* usable_size) = 0;
204 size_t* usable_size) in AllocThreadUnsafe() argument
206 return Alloc(self, num_bytes, bytes_allocated, usable_size); in AllocThreadUnsafe()
210 virtual size_t AllocationSize(mirror::Object* obj, size_t* usable_size) = 0;
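
The space.h hits outline the abstract allocation interface that the concrete spaces above override. A simplified sketch of that shape (the class name and declarations are illustrative, not the actual header): Alloc() and AllocationSize() are pure virtual, and AllocThreadUnsafe() defaults to the thread-safe path, exactly as the hit at line 206 suggests.

#include <cstddef>

namespace mirror { class Object; }
class Thread;

class AllocSpaceSketch {
 public:
  virtual ~AllocSpaceSketch() {}

  // Allocate num_bytes; report the true allocation size and usable size through
  // the optional out-parameters.
  virtual mirror::Object* Alloc(Thread* self, size_t num_bytes,
                                size_t* bytes_allocated, size_t* usable_size) = 0;

  // Default: no faster unsynchronized path, just defer to Alloc().
  virtual mirror::Object* AllocThreadUnsafe(Thread* self, size_t num_bytes,
                                            size_t* bytes_allocated, size_t* usable_size) {
    return Alloc(self, num_bytes, bytes_allocated, usable_size);
  }

  // Size of an existing allocation; may also report it via *usable_size.
  virtual size_t AllocationSize(mirror::Object* obj, size_t* usable_size) = 0;
};
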
zygote_space.cc
79 size_t* usable_size) { in Alloc() argument
84 size_t ZygoteSpace::AllocationSize(mirror::Object* obj, size_t* usable_size) { in AllocationSize() argument
dlmalloc_space.cc
129 size_t* bytes_allocated, size_t* usable_size) { in AllocWithGrowth() argument
137 result = AllocWithoutGrowthLocked(self, num_bytes, bytes_allocated, usable_size); in AllocWithGrowth()
rosalloc_space.cc
147 size_t* bytes_allocated, size_t* usable_size) { in AllocWithGrowth() argument
155 result = AllocCommon(self, num_bytes, bytes_allocated, usable_size); in AllocWithGrowth()
/art/runtime/gc/
heap-inl.h
67 size_t usable_size; in AllocObjectWithAllocator() local
84 usable_size = bytes_allocated; in AllocObjectWithAllocator()
85 pre_fence_visitor(obj, usable_size); in AllocObjectWithAllocator()
89 &usable_size); in AllocObjectWithAllocator()
92 obj = AllocateInternalWithGc(self, allocator, byte_count, &bytes_allocated, &usable_size, in AllocObjectWithAllocator()
106 DCHECK_GT(usable_size, 0u); in AllocObjectWithAllocator()
126 pre_fence_visitor(obj, usable_size); in AllocObjectWithAllocator()
132 CHECK_LE(obj->SizeOf(), usable_size); in AllocObjectWithAllocator()
197 size_t* usable_size) { in TryToAllocate() argument
210 *usable_size = alloc_size; in TryToAllocate()
[all …]
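
A sketch of the control flow the heap-inl.h hits trace, heavily simplified and under illustrative names: the fast path fills usable_size, a failed fast path falls back to an allocate-with-GC slow path that also fills it, and the value is then handed to the pre-fence visitor and sanity-checked.

#include <cassert>
#include <cstddef>

template <typename Heap, typename PreFenceVisitor>
void* AllocObjectSketch(Heap* heap, size_t byte_count, const PreFenceVisitor& pre_fence_visitor) {
  size_t bytes_allocated = 0;
  size_t usable_size = 0;
  // Fast path: TryToAllocate() fills usable_size on success.
  void* obj = heap->TryToAllocate(byte_count, &bytes_allocated, &usable_size);
  if (obj == nullptr) {
    // Slow path: collect garbage (and possibly grow the space) before retrying;
    // the retry reports its own usable_size.
    obj = heap->AllocateInternalWithGc(byte_count, &bytes_allocated, &usable_size);
    if (obj == nullptr) {
      return nullptr;  // out of memory
    }
  }
  assert(usable_size > 0);  // cf. DCHECK_GT(usable_size, 0u) in the hits
  // The pre-fence visitor (an array or class initializer) receives the usable
  // size so it can exploit any slack the allocator granted; the real code then
  // checks obj->SizeOf() <= usable_size.
  pre_fence_visitor(obj, usable_size);
  return obj;
}
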
heap.cc
1282 size_t* usable_size, in AllocateInternalWithGc() argument
1302 usable_size); in AllocateInternalWithGc()
1316 usable_size); in AllocateInternalWithGc()
1336 usable_size); in AllocateInternalWithGc()
1345 usable_size); in AllocateInternalWithGc()
1362 ptr = TryToAllocate<true, true>(self, allocator, alloc_size, bytes_allocated, usable_size); in AllocateInternalWithGc()
1378 usable_size); in AllocateInternalWithGc()
1422 usable_size); in AllocateInternalWithGc()
heap.h
653 size_t* bytes_allocated, size_t* usable_size,
672 size_t* usable_size)
/art/runtime/mirror/
array-inl.h
87 void operator()(Object* obj, size_t usable_size) const in operator()
89 UNUSED(usable_size); in operator()
110 void operator()(Object* obj, size_t usable_size) const in operator()
115 int32_t length = (usable_size - header_size_) / component_size_; in operator()
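
The array-inl.h hits show what a pre-fence visitor can do with the value: grow a freshly allocated array so its length fills the usable size, converting the slack beyond the header into whole components. A standalone sketch of that arithmetic (the real visitor works on mirror::Array; the plain names here are illustrative).

#include <cassert>
#include <cstddef>
#include <cstdint>

int32_t LengthFromUsableSize(size_t usable_size, size_t header_size, size_t component_size) {
  assert(component_size > 0 && usable_size >= header_size);
  // Whole components that fit after the array header; any remainder is wasted.
  return static_cast<int32_t>((usable_size - header_size) / component_size);
}

// Example: with an assumed 16-byte array header and 4-byte components, a usable
// size of 64 bytes yields a length of (64 - 16) / 4 = 12 elements.
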
object.cc
109 void operator()(Object* obj, size_t usable_size) const in operator ()()
111 UNUSED(usable_size); in operator ()()
class-inl.h
737 mirror::Object* obj, size_t usable_size) const { in operator()
738 DCHECK_LE(class_size_, usable_size); in operator()
class.cc
845 void operator()(Object* obj, size_t usable_size) const in operator ()()
847 UNUSED(usable_size); in operator ()()
class.h
1039 void operator()(mirror::Object* obj, size_t usable_size) const