
Searched refs:Alloc (Results 1 – 25 of 126) sorted by relevance


/art/libartbase/base/
arena_allocator_test.cc
79 void* alloc1 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 5 / 8); in TEST_F()
80 void* alloc2 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 2 / 8); in TEST_F()
87 void* alloc1 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 13 / 16); in TEST_F()
88 void* alloc2 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 11 / 16); in TEST_F()
91 void* alloc3 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 7 / 16); in TEST_F()
99 void* alloc1 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 13 / 16); in TEST_F()
100 void* alloc2 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 9 / 16); in TEST_F()
104 void* alloc3 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 5 / 16); in TEST_F()
112 void* alloc1 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 9 / 16); in TEST_F()
113 void* alloc2 = allocator.Alloc(arena_allocator::kArenaDefaultSize * 13 / 16); in TEST_F()
[all …]
dchecked_vector.h
36 template <typename T, typename Alloc = std::allocator<T>>
37 class dchecked_vector : private std::vector<T, Alloc> {
41 using Base = std::vector<T, Alloc>;
195 template <typename T, typename Alloc>
196 void swap(dchecked_vector<T, Alloc>& lhs, dchecked_vector<T, Alloc>& rhs) { in swap()
201 template <typename T, typename Alloc>
202 bool operator==(const dchecked_vector<T, Alloc>& lhs, const dchecked_vector<T, Alloc>& rhs) {
205 template <typename T, typename Alloc>
206 bool operator!=(const dchecked_vector<T, Alloc>& lhs, const dchecked_vector<T, Alloc>& rhs) {
209 template <typename T, typename Alloc>
[all …]
arena_object.h
34 return allocator->Alloc(size, kAllocKind);
38 return allocator->Alloc(size, kAllocKind);
58 return allocator->Alloc(size, kAllocKind);
62 return allocator->Alloc(size, kAllocKind);
scoped_arena_allocator.h
97 void* Alloc(size_t bytes, ArenaAllocKind kind) ALWAYS_INLINE { in Alloc() function
153 void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
155 return arena_stack_->Alloc(bytes, kind);
159 T* Alloc(ArenaAllocKind kind = kArenaAllocMisc) {
165 return static_cast<T*>(Alloc(length * sizeof(T), kind));
arena_bit_vector.cc
57 void* storage = allocator->template Alloc<ArenaBitVectorAllocator>(kind); in Create()
66 void* Alloc(size_t size) override { in Alloc() function in art::ArenaBitVectorAllocator
67 return allocator_->Alloc(size, this->Kind()); in Alloc()
allocator.cc
33 void* Alloc(size_t size) override { in Alloc() function in art::CallocAllocator
52 void* Alloc([[maybe_unused]] size_t size) override { in Alloc() function in art::NoopAllocator
hash_map.h
77 class Alloc = std::allocator<std::pair<Key, Value>>>
82 Alloc> {
88 Alloc>;
arena_allocator.h
269 void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
330 auto* new_ptr = Alloc(new_size, kind); // Note: Alloc will take care of aligning new_size.
337 T* Alloc(ArenaAllocKind kind = kArenaAllocMisc) {
343 return static_cast<T*>(Alloc(length * sizeof(T), kind));
arena_bit_vector.h
40 void* storage = allocator->template Alloc<ArenaBitVector>(kind);
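
The arena_allocator.h and scoped_arena_allocator.h matches above show the same two-level API: a raw Alloc(size_t bytes, ArenaAllocKind kind) that hands out bytes, plus a templated helper that multiplies a length by sizeof(T) and casts the result. Below is a minimal, self-contained sketch of that pattern; ToyArena, its fixed buffer size, and the AllocArray name are assumptions for illustration, not ART code.

// Minimal sketch (not ART's implementation) of the pattern visible in
// arena_allocator.h above: a byte-oriented Alloc(bytes, kind) plus a typed
// array helper that computes length * sizeof(T) and casts the result.
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <memory>

enum ArenaAllocKind { kArenaAllocMisc, kArenaAllocGrowableBitMap };

class ToyArena {
 public:
  // Byte-oriented entry point, mirroring the shape of
  //   void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc)
  void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) {
    (void)kind;  // ART tags allocations by kind for statistics; ignored here.
    size_t aligned = (bytes + 7u) & ~size_t{7u};  // Keep 8-byte alignment.
    if (used_ + aligned > kCapacity) return nullptr;
    void* result = buffer_.get() + used_;
    used_ += aligned;
    return result;
  }

  // Typed helper in the style of the templated Alloc shown above:
  //   static_cast<T*>(Alloc(length * sizeof(T), kind))
  template <typename T>
  T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
    return static_cast<T*>(Alloc(length * sizeof(T), kind));
  }

 private:
  static constexpr size_t kCapacity = 1u << 20;  // 1 MiB toy backing buffer.
  std::unique_ptr<uint8_t[]> buffer_ = std::make_unique<uint8_t[]>(kCapacity);
  size_t used_ = 0;
};

int main() {
  ToyArena arena;
  int32_t* ints = arena.AllocArray<int32_t>(16);  // 64 bytes from the arena.
  void* raw = arena.Alloc(100);                   // Raw, kind-tagged allocation.
  std::printf("ints=%p raw=%p\n", static_cast<void*>(ints), raw);
  return 0;
}
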
/art/compiler/utils/
dedupe_set-inl.h
39 typename Alloc,
43 struct DedupeSet<InKey, StoreKey, Alloc, HashType, HashFunc, kShard>::Stats {
52 typename Alloc,
56 class DedupeSet<InKey, StoreKey, Alloc, HashType, HashFunc, kShard>::Shard {
58 Shard(const Alloc& alloc, const std::string& lock_name)
183 Alloc alloc_;
191 typename Alloc,
195 const StoreKey* DedupeSet<InKey, StoreKey, Alloc, HashType, HashFunc, kShard>::Add(
213 typename Alloc,
217 DedupeSet<InKey, StoreKey, Alloc, HashType, HashFunc, kShard>::DedupeSet(const char* set_name,
[all …]
dedupe_set.h
35 typename Alloc,
44 DedupeSet(const char* set_name, const Alloc& alloc);
/art/runtime/mirror/
object_array-alloc-inl.h
36 inline ObjPtr<ObjectArray<T>> ObjectArray<T>::Alloc(Thread* self, in Alloc() function
40 ObjPtr<Array> array = Array::Alloc(self, in Alloc()
54 inline ObjPtr<ObjectArray<T>> ObjectArray<T>::Alloc(Thread* self, in Alloc() function
57 return Alloc(self, in Alloc()
72 ObjPtr<ObjectArray<T>> new_array = Alloc(self, h_this->GetClass(), new_length, allocator_type); in CopyOf()
class-alloc-inl.h
50 inline ObjPtr<Object> Class::Alloc(Thread* self, gc::AllocatorType allocator_type) { in Alloc() function
78 return Alloc(self, Runtime::Current()->GetHeap()->GetCurrentAllocator()); in AllocObject()
82 return Alloc(self, Runtime::Current()->GetHeap()->GetCurrentNonMovingAllocator()); in AllocNonMovableObject()
array.cc
58 Array::Alloc(self, array_class.Get(), array_length, component_size_shift, allocator_type))); in RecursiveCreateMultiArray()
123 ObjPtr<PrimitiveArray<T>> PrimitiveArray<T>::Alloc(Thread* self, size_t length) { in Alloc() function in art::mirror::PrimitiveArray
126 ObjPtr<Array> raw_array = Array::Alloc(self, in Alloc()
153 Alloc(self, klass, new_length, component_shift, allocator_type); // Invalidates klass. in CopyOf()
string-alloc-inl.h
195 inline ObjPtr<String> String::Alloc(Thread* self, in Alloc() function
242 return Alloc<kIsInstrumented>(self, length_with_flag, allocator_type, visitor); in AllocEmptyString()
260 return Alloc<kIsInstrumented>(self, length_with_flag, allocator_type, visitor); in AllocFromByteArray()
284 return Alloc<kIsInstrumented>(self, length_with_flag, allocator_type, visitor); in AllocFromUtf16ByteArray()
299 return Alloc<kIsInstrumented>(self, length_with_flag, allocator_type, visitor); in AllocFromCharArray()
313 return Alloc<kIsInstrumented>(self, length_with_flag, allocator_type, visitor); in AllocFromString()
object_test.cc
86 return mirror::ObjectArray<T>::Alloc( in AllocObjectArray()
165 Array::Alloc(soa.Self(), c.Get(), 1, c->GetComponentSizeShift(), allocator_type)); in TEST_F()
170 a.Assign(Array::Alloc(soa.Self(), c.Get(), 1, c->GetComponentSizeShift(), allocator_type)); in TEST_F()
175 a.Assign(Array::Alloc(soa.Self(), c.Get(), 1, c->GetComponentSizeShift(), allocator_type)); in TEST_F()
186 Array::Alloc</*kIsInstrumented=*/ true, /*kFillUsable=*/ true>( in TEST_F()
192 a.Assign(Array::Alloc</*kIsInstrumented=*/ true, /*kFillUsable=*/ true>( in TEST_F()
198 a.Assign(Array::Alloc</*kIsInstrumented=*/ true, /*kFillUsable=*/ true>( in TEST_F()
204 a.Assign(Array::Alloc</*kIsInstrumented=*/ true, /*kFillUsable=*/ true>( in TEST_F()
216 Handle<ArrayT> a = hs.NewHandle(ArrayT::Alloc(soa.Self(), 2)); in TestPrimitiveArray()
265 hs.NewHandle(ObjPtr<PointerArray>::DownCast<Array>(IntArray::Alloc(soa.Self(), 1))); in TEST_F()
[all …]
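
The mirror/ matches above all follow one shape: a static Alloc() factory on the managed type (ObjectArray<T>::Alloc, Class::Alloc, PrimitiveArray<T>::Alloc, String::Alloc) that takes the allocating thread plus size information and forwards to the heap's current allocator. The sketch below illustrates only that shape; ToyHeap, ToyIntArray, and their layout are hypothetical and far simpler than the real runtime.

// Illustrative sketch only (not mirror:: code): a static Alloc() factory that
// computes the object size, asks the heap for memory, and initializes the
// header in place before returning the new object.
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <new>
#include <vector>

class ToyHeap {
 public:
  void* AllocObject(size_t bytes) {
    storage_.emplace_back(bytes);   // Keep every allocation alive for the demo.
    return storage_.back().data();
  }

 private:
  std::vector<std::vector<uint8_t>> storage_;
};

class ToyIntArray {
 public:
  // Static factory in the spirit of PrimitiveArray<T>::Alloc(self, length).
  static ToyIntArray* Alloc(ToyHeap* heap, int32_t length) {
    size_t size = sizeof(ToyIntArray) + sizeof(int32_t) * static_cast<size_t>(length);
    void* memory = heap->AllocObject(size);
    if (memory == nullptr) return nullptr;
    return new (memory) ToyIntArray(length);  // Initialize the header in place.
  }

  int32_t GetLength() const { return length_; }
  int32_t* GetData() { return reinterpret_cast<int32_t*>(this + 1); }

 private:
  explicit ToyIntArray(int32_t length) : length_(length) {}
  int32_t length_;  // Header field; the element payload follows the object.
};

int main() {
  ToyHeap heap;
  ToyIntArray* a = ToyIntArray::Alloc(&heap, 4);
  for (int32_t i = 0; i < a->GetLength(); ++i) a->GetData()[i] = i * i;
  std::printf("len=%d last=%d\n", a->GetLength(), a->GetData()[3]);
  return 0;
}
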
/art/runtime/gc/space/
space_create_test.cc
102 MutableHandle<mirror::Object> ptr1(hs.NewHandle(Alloc(space, in TEST_P()
115 mirror::Object* ptr2 = Alloc(space, self, 8 * MB, &unused, nullptr, &unused); in TEST_P()
133 mirror::Object* ptr4 = space->Alloc(self, 8 * MB, &unused, nullptr, &unused); in TEST_P()
166 EXPECT_TRUE(space->Alloc(self, 1U * MB, &unused, nullptr, &unused) != nullptr); in TEST_P()
187 ptr1.Assign(Alloc(space, in TEST_P()
200 ptr2 = Alloc(space, self, 8 * MB, &unused, nullptr, &unused); in TEST_P()
236 MutableHandle<mirror::Object> ptr1(hs.NewHandle(Alloc(space, in TEST_P()
249 mirror::Object* ptr2 = Alloc(space, self, 8 * MB, &unused, nullptr, &unused); in TEST_P()
267 mirror::Object* ptr4 = Alloc(space, self, 8 * MB, &unused, nullptr, &unused); in TEST_P()
314 lots_of_objects[i] = Alloc(space, in TEST_P()
large_object_space_test.cc
63 mirror::Object* obj = los->Alloc(self, request_size, &allocation_size, nullptr, in LargeObjectTest()
115 mirror::Object* obj = los->Alloc(self, 100 * MB, &bytes_allocated, nullptr, in LargeObjectTest()
134 mirror::Object* ptr = los_->Alloc(self, size_, &alloc_size, nullptr, in Run()
region_space-inl.h
29 inline mirror::Object* RegionSpace::Alloc([[maybe_unused]] Thread* self, in Alloc() function
45 return Alloc(self, num_bytes, bytes_allocated, usable_size, bytes_tl_bulk_allocated); in AllocThreadUnsafe()
57 obj = (kForEvac ? evac_region_ : current_region_)->Alloc(num_bytes, in AllocNonvirtual()
67 obj = (kForEvac ? evac_region_ : current_region_)->Alloc(num_bytes, in AllocNonvirtual()
76 obj = r->Alloc(num_bytes, bytes_allocated, usable_size, bytes_tl_bulk_allocated); in AllocNonvirtual()
97 inline mirror::Object* RegionSpace::Region::Alloc(size_t num_bytes, in Alloc() function
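
Region::Alloc and RegionSpace::AllocNonvirtual above suggest a bump-pointer allocation inside the current (or evacuation) region, reporting the granted size through out-parameters and returning nullptr when the region is full so the caller can retry in another region. Below is a small, self-contained sketch of that bump-pointer shape; ToyRegion, its 64 KiB size, and the parameter names are illustrative assumptions rather than the real region layout.

// Sketch of a lock-free bump-pointer Alloc in the style suggested by
// Region::Alloc(num_bytes, bytes_allocated, usable_size, ...) above.
#include <atomic>
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <memory>

class ToyRegion {
 public:
  explicit ToyRegion(size_t size)
      : size_(size), memory_(std::make_unique<uint8_t[]>(size)), top_(0) {}

  // Returns the object address, or nullptr when the region is exhausted.
  void* Alloc(size_t num_bytes, size_t* bytes_allocated, size_t* usable_size) {
    size_t aligned = (num_bytes + 7u) & ~size_t{7u};  // 8-byte alignment.
    size_t old_top = top_.load(std::memory_order_relaxed);
    do {
      if (old_top + aligned > size_) return nullptr;  // Full; caller picks a new region.
    } while (!top_.compare_exchange_weak(old_top, old_top + aligned,
                                         std::memory_order_relaxed));
    *bytes_allocated = aligned;
    if (usable_size != nullptr) *usable_size = aligned;
    return memory_.get() + old_top;
  }

 private:
  const size_t size_;
  std::unique_ptr<uint8_t[]> memory_;
  std::atomic<size_t> top_;  // Bump pointer offset into the region.
};

int main() {
  ToyRegion region(1u << 16);  // 64 KiB toy region.
  size_t bytes_allocated = 0;
  void* obj = region.Alloc(40, &bytes_allocated, nullptr);
  std::printf("obj=%p bytes_allocated=%zu\n", obj, bytes_allocated);
  return 0;
}
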
/art/runtime/
linear_alloc-inl.h
89 inline void* LinearAlloc::Alloc(Thread* self, size_t size, LinearAllocKind kind) { in Alloc() function
93 TrackingHeader* storage = new (allocator_.Alloc(size)) TrackingHeader(size, kind); in Alloc()
97 return allocator_.Alloc(size); in Alloc()
123 ptr = static_cast<uint8_t*>(allocator_.Alloc(required_size)); in AllocAlign16()
reference_table_test.cc
118 Handle<mirror::ShortArray> o2 = hs.NewHandle(mirror::ShortArray::Alloc(soa.Self(), 0)); in TEST_F()
298 Handle<mirror::ByteArray> b1_1 = hs.NewHandle(mirror::ByteArray::Alloc(soa.Self(), 1)); in TEST_F()
300 rt.Add(mirror::ByteArray::Alloc(soa.Self(), 2)); in TEST_F()
302 rt.Add(mirror::ByteArray::Alloc(soa.Self(), 2)); in TEST_F()
303 rt.Add(mirror::ByteArray::Alloc(soa.Self(), 1)); in TEST_F()
304 rt.Add(mirror::ByteArray::Alloc(soa.Self(), 2)); in TEST_F()
307 rt.Add(mirror::CharArray::Alloc(soa.Self(), 0)); in TEST_F()
linear_alloc.h
81 void* Alloc(Thread* self, size_t size, LinearAllocKind kind) REQUIRES(!lock_);
91 return reinterpret_cast<T*>(Alloc(self, elements * sizeof(T), kind)); in AllocArray()
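
The linear_alloc-inl.h match above placement-news a TrackingHeader into the allocation when tracking is enabled, and linear_alloc.h exposes an AllocArray<T> helper that computes elements * sizeof(T). The sketch below combines both ideas under assumed names (ToyLinearAlloc, ToyHeader, ToyAllocKind); the real LinearAlloc's layout, alignment, and locking are more involved, and the header-then-payload layout here is an assumption for illustration.

// Sketch of a header-tracking linear allocator: reserve room for a small
// header plus the payload, construct the header in place, return the payload.
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <new>
#include <vector>

enum class ToyAllocKind : uint32_t { kMisc, kArray };

struct ToyHeader {
  ToyHeader(uint32_t size, ToyAllocKind kind) : size_(size), kind_(kind) {}
  uint32_t size_;
  ToyAllocKind kind_;
};

class ToyLinearAlloc {
 public:
  void* Alloc(size_t size, ToyAllocKind kind) {
    // One block holds the tracking header followed by the payload.
    blocks_.emplace_back(sizeof(ToyHeader) + size);
    void* block = blocks_.back().data();
    ToyHeader* header = new (block) ToyHeader(static_cast<uint32_t>(size), kind);
    return header + 1;  // Payload starts right after the header.
  }

  // Typed helper in the style of LinearAlloc::AllocArray:
  //   reinterpret_cast<T*>(Alloc(elements * sizeof(T), kind))
  template <typename T>
  T* AllocArray(size_t elements, ToyAllocKind kind = ToyAllocKind::kArray) {
    return reinterpret_cast<T*>(Alloc(elements * sizeof(T), kind));
  }

 private:
  std::vector<std::vector<uint8_t>> blocks_;
};

int main() {
  ToyLinearAlloc alloc;
  uint64_t* values = alloc.AllocArray<uint64_t>(8);
  values[0] = 42;
  std::printf("values[0]=%llu\n", static_cast<unsigned long long>(values[0]));
  return 0;
}
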
/art/test/1000-non-moving-space-stress/src-art/
Main.java
32 $noinline$Alloc(runtime); in main()
75 static void $noinline$Alloc(VMRuntime runtime) { in $noinline$Alloc()
/art/dex2oat/linker/riscv64/
relative_patcher_riscv64.h
60 template <typename Alloc>
61 uint32_t GetInsn(std::vector<uint8_t, Alloc>* code, uint32_t offset);
/art/dex2oat/linker/arm64/
relative_patcher_arm64.h
66 template <typename Alloc>
67 static uint32_t GetInsn(std::vector<uint8_t, Alloc>* code, uint32_t offset);
