/external/tensorflow/tensorflow/core/common_runtime/gpu/ |
D | pool_allocator_test.cc |
        38   EXPECT_EQ(nullptr, pool.AllocateRaw(4 /*alignment*/, 0 /*num_bytes*/));   in TEST()
        64   void* p0 = pool.AllocateRaw(4, 0);   in TEST()
        65   void* p4 = pool.AllocateRaw(4, 4);   in TEST()
        66   void* p12 = pool.AllocateRaw(4, 12);   in TEST()
        92   void* p = pool.AllocateRaw(alignment, 111);   in TEST()
        109  void* p = pool.AllocateRaw(4, 64 << i);   in TEST()
        121  void* p = pool.AllocateRaw(4, 64 << i);   in TEST()
        158  void* p1_16 = pool.AllocateRaw(4, 16);   in TEST()
        169  void* p2_16 = pool.AllocateRaw(4, 16);  // Get it again.   in TEST()
        181  void* p3_4 = pool.AllocateRaw(4, 4);   in TEST()
        [all …]
|
D | gpu_bfc_allocator_test.cc |
        63   void* raw = a.AllocateRaw(1, s);   in TEST()
        100  void* raw = a.AllocateRaw(1, size);   in TEST()
        117  void* out_of_memory_ptr = a.AllocateRaw(1, (1 << 30) + 1);   in TEST()
        124  void* raw = a.AllocateRaw(1, size);   in TEST()
        255  void* raw = a.AllocateRaw(1, size);   in TEST()
        275  void* raw = a.AllocateRaw(1, size);   in TEST()
        313  void* amem = a.AllocateRaw(1, 1);   in TEST()
        314  void* bmem = b.AllocateRaw(1, 1 << 30);   in TEST()
        333  void* p = a.AllocateRaw(1, bytes);   in BM_Allocation()
        359  void* p = a.AllocateRaw(1, bytes);   in BM_AllocationThreaded()
        [all …]
|
D | gpu_debug_allocator.cc |
        87   void* GPUDebugAllocator::AllocateRaw(size_t alignment, size_t num_bytes) {   in AllocateRaw() function in tensorflow::GPUDebugAllocator
        89   void* allocated_ptr = base_allocator_->AllocateRaw(alignment, num_bytes);   in AllocateRaw()
        165  void* GPUNanResetAllocator::AllocateRaw(size_t alignment, size_t num_bytes) {   in AllocateRaw() function in tensorflow::GPUNanResetAllocator
        166  void* allocated_ptr = base_allocator_->AllocateRaw(alignment, num_bytes);   in AllocateRaw()
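The entry above shows the forwarding pattern used by GPUDebugAllocator and GPUNanResetAllocator: each holds a base_allocator_ and routes AllocateRaw through it so extra debugging behaviour can be wrapped around the underlying allocation. A minimal, self-contained sketch of that wrapper shape follows; RawAllocator and CheckingAllocator are illustrative stand-ins, not the TensorFlow classes.

    #include <cstddef>

    // Minimal allocator interface, standing in for tensorflow::Allocator.
    class RawAllocator {
     public:
      virtual ~RawAllocator() = default;
      virtual void* AllocateRaw(size_t alignment, size_t num_bytes) = 0;
      virtual void DeallocateRaw(void* ptr) = 0;
    };

    // Wrapper in the style of GPUDebugAllocator: every call is forwarded to the
    // underlying allocator, leaving room for checks before and after the forward.
    class CheckingAllocator : public RawAllocator {
     public:
      explicit CheckingAllocator(RawAllocator* base) : base_allocator_(base) {}

      void* AllocateRaw(size_t alignment, size_t num_bytes) override {
        void* allocated_ptr = base_allocator_->AllocateRaw(alignment, num_bytes);
        // Debug bookkeeping (guard bytes, NaN fills, ...) would go here.
        return allocated_ptr;
      }

      void DeallocateRaw(void* ptr) override {
        // Validation of any guard bytes would happen here before releasing.
        base_allocator_->DeallocateRaw(ptr);
      }

     private:
      RawAllocator* base_allocator_;  // not owned
    };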
|
D | gpu_allocator_retry_test.cc |
        37   void* AllocateRaw(size_t alignment, size_t num_bytes) {   in AllocateRaw() function in tensorflow::__anon55289c970111::FakeAllocator
        38   return retry_.AllocateRaw(   in AllocateRaw()
        133  ptr = alloc_->AllocateRaw(16, 1);   in LaunchConsumerThreads()
|
D | gpu_debug_allocator.h |
        40   void* AllocateRaw(size_t alignment, size_t num_bytes) override;
        70   void* AllocateRaw(size_t alignment, size_t num_bytes) override;
|
D | gpu_managed_allocator.h | 30 void* AllocateRaw(size_t alignment, size_t num_bytes) override;
|
D | gpu_managed_allocator.cc | 25 void* GpuManagedAllocator::AllocateRaw(size_t alignment, size_t num_bytes) { in AllocateRaw() function in tensorflow::GpuManagedAllocator
|
D | gpu_cudamalloc_allocator.h | 38 void* AllocateRaw(size_t alignment, size_t num_bytes) override;
|
/external/tensorflow/tensorflow/core/common_runtime/ |
D | scoped_allocator_mgr_test.cc |
        140  sa_instances_[0]->AllocateRaw(0 /* alignment */, 512 * sizeof(float));   in TEST_F()
        142  sa_instances_[1]->AllocateRaw(0 /* alignment */, 512 * sizeof(float));   in TEST_F()
        157  char* ptr0 = static_cast<char*>(inst0->AllocateRaw(0, 512 * sizeof(float)));   in TEST_F()
        163  char* ptr1 = static_cast<char*>(inst1->AllocateRaw(0, 9 * sizeof(float)));   in TEST_F()
        167  char* ptr2 = static_cast<char*>(inst2->AllocateRaw(0, 512 * sizeof(float)));   in TEST_F()
        209  static_cast<char*>(sa_instances_[0]->AllocateRaw(0, 512 * sizeof(float)));   in TEST_F()
        215  EXPECT_EQ(nullptr, sa_instances_[1]->AllocateRaw(0, 256 * sizeof(float)));   in TEST_F()
        218  EXPECT_EQ(nullptr, sa_instances_[1]->AllocateRaw(0, 1024 * sizeof(float)));   in TEST_F()
        219  void* ptr1 = sa_instances_[1]->AllocateRaw(0, 512 * sizeof(float));   in TEST_F()
        221  EXPECT_EQ(nullptr, sa_instances_[0]->AllocateRaw(0, 512 * sizeof(float)));   in TEST_F()
|
D | scoped_allocator.h |
        68   void* AllocateRaw(int32 field_index, size_t num_bytes) LOCKS_EXCLUDED(mu_);
        99   void* AllocateRaw(size_t alignment, size_t num_bytes)
        101  void* AllocateRaw(size_t alignment, size_t num_bytes,   in AllocateRaw() function
        103  return AllocateRaw(alignment, num_bytes);   in AllocateRaw()
|
D | mkl_cpu_allocator.h |
        66   void* AllocateRaw(size_t alignment, size_t num_bytes) override {   in AllocateRaw() function
        224  inline void* AllocateRaw(size_t alignment, size_t num_bytes) override {   in AllocateRaw() function
        230  return small_size_allocator_->AllocateRaw(alignment, num_bytes);   in AllocateRaw()
        233  void* ptr = large_size_allocator_->AllocateRaw(alignment, num_bytes);   in AllocateRaw()
        279  return cpu_allocator()->AllocateRaw(kAlignment, size);   in MallocHook()
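mkl_cpu_allocator.h dispatches AllocateRaw by request size: small requests go to small_size_allocator_, everything else to large_size_allocator_. A hedged sketch of that size-threshold dispatch is below; the 4 KiB threshold and the function and parameter names are placeholders, not the values or names TensorFlow uses.

    #include <cstddef>
    #include <functional>

    using AllocFn = std::function<void*(size_t alignment, size_t num_bytes)>;

    // Size-threshold dispatch in the spirit of the MKL CPU allocator: small
    // requests go to a pooling allocator, larger ones to a BFC-style allocator.
    void* DispatchAllocateRaw(size_t alignment, size_t num_bytes,
                              const AllocFn& small_alloc,
                              const AllocFn& large_alloc) {
      constexpr size_t kSmallAllocationThreshold = 4096;  // placeholder threshold
      if (num_bytes < kSmallAllocationThreshold) {
        return small_alloc(alignment, num_bytes);
      }
      return large_alloc(alignment, num_bytes);
    }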
|
D | scoped_allocator.cc |
        56   void* ScopedAllocator::AllocateRaw(int32 field_index, size_t num_bytes) {   in AllocateRaw() function in tensorflow::ScopedAllocator
        166  void* ScopedAllocatorInstance::AllocateRaw(size_t alignment, size_t num_bytes) {   in AllocateRaw() function in tensorflow::ScopedAllocatorInstance
        167  void* ptr = scoped_allocator_->AllocateRaw(field_index_, num_bytes);   in AllocateRaw()
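Unlike the other allocators in this listing, ScopedAllocator::AllocateRaw takes a field index rather than an alignment: each ScopedAllocatorInstance owns one pre-computed field of a shared backing buffer, and its alignment/size overload simply forwards its field_index_. The scoped_allocator_mgr_test.cc entry above also shows that a field is handed out only once and only for the size it was registered with. A simplified, self-contained sketch of that idea follows; Field and FieldBackedBuffer are invented names, and the layout bookkeeping is reduced to the bare minimum.

    #include <cstddef>
    #include <cstdint>
    #include <utility>
    #include <vector>

    struct Field {
      size_t offset = 0;       // byte offset into the backing buffer
      size_t bytes = 0;        // size this field was registered with
      bool allocated = false;  // a field may be handed out only once
    };

    class FieldBackedBuffer {
     public:
      FieldBackedBuffer(void* backing, std::vector<Field> fields)
          : backing_(static_cast<char*>(backing)), fields_(std::move(fields)) {}

      // Hands out field `field_index`, refusing double allocation or a size
      // that does not match the registered field size.
      void* AllocateRaw(int32_t field_index, size_t num_bytes) {
        Field& f = fields_[field_index];
        if (f.allocated || f.bytes != num_bytes) return nullptr;
        f.allocated = true;
        return backing_ + f.offset;
      }

     private:
      char* backing_;  // not owned
      std::vector<Field> fields_;
    };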
|
D | process_state.h |
        118  void* AllocateRaw(size_t alignment, size_t num_bytes) override {   in AllocateRaw() function
        119  void* p = a_->AllocateRaw(alignment, num_bytes);   in AllocateRaw()
|
D | bfc_allocator.h |
        55   void* AllocateRaw(size_t alignment, size_t num_bytes) override {   in AllocateRaw() function
        56   return AllocateRaw(alignment, num_bytes, AllocationAttributes());   in AllocateRaw()
        59   void* AllocateRaw(size_t alignment, size_t num_bytes,
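bfc_allocator.h (and tracking_allocator.h below) shows a small idiom worth calling out: the two-argument AllocateRaw just forwards to the AllocationAttributes overload with default attributes, so the allocator only has to implement the attribute-taking version. A stripped-down sketch of that shape; AllocationAttributes is reduced here to a single illustrative flag, and the class layout is simplified relative to the real headers.

    #include <cstddef>

    struct AllocationAttributes {
      bool retry_on_failure = true;  // illustrative; the real struct has more fields
    };

    class AttributedAllocator {
     public:
      virtual ~AttributedAllocator() = default;

      // Convenience overload: forwards with default attributes, mirroring the
      // forwarding at bfc_allocator.h lines 55-56 above.
      void* AllocateRaw(size_t alignment, size_t num_bytes) {
        return AllocateRaw(alignment, num_bytes, AllocationAttributes());
      }

      virtual void* AllocateRaw(size_t alignment, size_t num_bytes,
                                const AllocationAttributes& allocation_attr) = 0;
    };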
|
D | allocator_retry.h | 38 void* AllocateRaw(std::function<void*(size_t alignment, size_t num_bytes,
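allocator_retry.h takes the allocation itself as a callable, which is how the FakeAllocator in gpu_allocator_retry_test.cc plugs its own backing store into the retry machinery. The sketch below shows only the general retry shape, under stated assumptions: the parameter list is illustrative (the real signature is truncated in this listing), and a plain sleep loop stands in for the wait-for-deallocation signalling the real class uses.

    #include <chrono>
    #include <cstddef>
    #include <functional>
    #include <thread>

    // Re-invokes alloc_func until it succeeds or the wait budget is exhausted,
    // on the assumption that other threads may free memory in the meantime.
    void* AllocateRawWithRetry(
        const std::function<void*(size_t alignment, size_t num_bytes)>& alloc_func,
        int max_millis_to_wait, size_t alignment, size_t num_bytes) {
      using Clock = std::chrono::steady_clock;
      const auto deadline =
          Clock::now() + std::chrono::milliseconds(max_millis_to_wait);
      for (;;) {
        void* ptr = alloc_func(alignment, num_bytes);
        if (ptr != nullptr || Clock::now() >= deadline) return ptr;
        std::this_thread::sleep_for(std::chrono::milliseconds(1));
      }
    }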
|
/external/tensorflow/tensorflow/core/framework/ |
D | tracking_allocator_test.cc |
        30   void* AllocateRaw(size_t /*alignment*/, size_t num_bytes) override {   in AllocateRaw() function in tensorflow::TestableSizeTrackingAllocator
        56   void* AllocateRaw(size_t /*alignment*/, size_t num_bytes) override {   in AllocateRaw() function in tensorflow::NoMemoryAllocator
        74   void* p1 = ta->AllocateRaw(4, 4);   in TEST()
        76   void* p2 = ta->AllocateRaw(4, 12);   in TEST()
        91   p1 = ta->AllocateRaw(4, 4);   in TEST()
        97   p2 = ta->AllocateRaw(4, 12);   in TEST()
        123  void* p1 = ta->AllocateRaw(4, 12);   in TEST()
        125  void* p2 = ta->AllocateRaw(4, 4);   in TEST()
        149  void* p1 = ta->AllocateRaw(4, 12);   in TEST()
|
D | allocator.h |
        95   virtual void* AllocateRaw(size_t alignment, size_t num_bytes) = 0;
        101  virtual void* AllocateRaw(size_t alignment, size_t num_bytes,   in AllocateRaw() function
        105  return AllocateRaw(alignment, num_bytes);   in AllocateRaw()
        132  void* p = AllocateRaw(kAllocatorAlignment, sizeof(T) * num_elements,   in Allocate()
        300  void* AllocateRaw(size_t alignment, size_t num_bytes) override {   in AllocateRaw() function
        301  return wrapped_->AllocateRaw(alignment, num_bytes);   in AllocateRaw()
        304  void* AllocateRaw(size_t alignment, size_t num_bytes,   in AllocateRaw() function
        306  return wrapped_->AllocateRaw(alignment, num_bytes, allocation_attr);   in AllocateRaw()
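allocator.h anchors the TensorFlow half of this listing: AllocateRaw(alignment, num_bytes) is the pure-virtual primitive, the AllocationAttributes overload defaults to it, the typed Allocate<T>() helper is built on top of it, and the wrapper at lines 300-306 forwards both overloads to a wrapped_ allocator. Below is a minimal concrete allocator against that shape, offered only as a sketch: SimpleAllocator is not a TensorFlow class, std::aligned_alloc (C++17) stands in for TensorFlow's own aligned allocation and statistics machinery, and the nullptr-for-zero-bytes check merely mirrors what pool_allocator_test.cc expects of the pool allocator.

    #include <cstddef>
    #include <cstdlib>

    class SimpleAllocator {
     public:
      // alignment must be a nonzero power of two supported by the platform.
      void* AllocateRaw(size_t alignment, size_t num_bytes) {
        if (num_bytes == 0) return nullptr;  // zero-byte requests yield nullptr
        // std::aligned_alloc requires the size to be a multiple of the alignment.
        size_t rounded = (num_bytes + alignment - 1) / alignment * alignment;
        return std::aligned_alloc(alignment, rounded);
      }

      void DeallocateRaw(void* ptr) { std::free(ptr); }
    };

    // Typical call site, mirroring the test entries in this listing:
    //   void* p = allocator.AllocateRaw(4 /*alignment*/, 12 /*num_bytes*/);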
|
D | tracking_allocator.h |
        59   void* AllocateRaw(size_t alignment, size_t num_bytes) override {   in AllocateRaw() function
        60   return AllocateRaw(alignment, num_bytes, AllocationAttributes());   in AllocateRaw()
        62   void* AllocateRaw(size_t alignment, size_t num_bytes,
|
D | allocator_test.cc |
        93   void* raw = a->AllocateRaw(1, s);   in TEST()
        184  void* p = a->AllocateRaw(1, bytes);   in BM_Allocation()
|
D | tracking_allocator.cc |
        32   void* TrackingAllocator::AllocateRaw(   in AllocateRaw() function in tensorflow::TrackingAllocator
        35   void* ptr = allocator_->AllocateRaw(alignment, num_bytes, allocation_attr);   in AllocateRaw()
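tracking_allocator.cc forwards the attribute-taking AllocateRaw to the wrapped allocator_; together with the TestableSizeTrackingAllocator in the test entry above, the point of the class is to observe allocations as they flow through. A compact sketch of that wrap-and-record idea, with locking and the AllocationAttributes overload omitted; CountingAllocator and its counters are illustrative, not TrackingAllocator's real bookkeeping.

    #include <cstddef>
    #include <unordered_map>

    template <typename Alloc>
    class CountingAllocator {
     public:
      explicit CountingAllocator(Alloc* wrapped) : wrapped_(wrapped) {}

      void* AllocateRaw(size_t alignment, size_t num_bytes) {
        void* ptr = wrapped_->AllocateRaw(alignment, num_bytes);
        if (ptr != nullptr) {
          sizes_[ptr] = num_bytes;  // remember the size per pointer
          in_use_ += num_bytes;
          if (in_use_ > peak_) peak_ = in_use_;
        }
        return ptr;
      }

      void DeallocateRaw(void* ptr) {
        auto it = sizes_.find(ptr);
        if (it != sizes_.end()) {
          in_use_ -= it->second;
          sizes_.erase(it);
        }
        wrapped_->DeallocateRaw(ptr);
      }

      size_t peak_bytes() const { return peak_; }

     private:
      Alloc* wrapped_;  // not owned
      std::unordered_map<void*, size_t> sizes_;
      size_t in_use_ = 0;
      size_t peak_ = 0;
    };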
|
D | allocator.cc |
        112  void* AllocateRaw(size_t alignment, size_t num_bytes) override {   in AllocateRaw() function in tensorflow::__anon18af429f0111::CPUAllocator
        196  return cpu_allocator_->AllocateRaw(alignment, num_bytes);   in Alloc()
|
/external/v8/src/heap/ |
D | heap-inl.h |
        118  AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,   in AllocateRaw() function
        145  allocation = new_lo_space_->AllocateRaw(size_in_bytes);   in AllocateRaw()
        147  allocation = new_space_->AllocateRaw(size_in_bytes, alignment);   in AllocateRaw()
        159  allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);   in AllocateRaw()
        161  allocation = old_space_->AllocateRaw(size_in_bytes, alignment);   in AllocateRaw()
        167  allocation = lo_space_->AllocateRaw(size_in_bytes, EXECUTABLE);   in AllocateRaw()
        171  allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);   in AllocateRaw()
        180  allocation = read_only_space_->AllocateRaw(size_in_bytes, alignment);   in AllocateRaw()
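On the V8 side, Heap::AllocateRaw is a dispatcher: the requested AllocationSpace selects a space-specific AllocateRaw, and the entry above also shows oversized requests being routed to the large-object spaces. The sketch below captures only that shape; Space, SpaceSketch, HeapSketch, the size limit and the simplified AllocationResult are stand-ins for V8's real types, which additionally carry alignment and executability flags.

    #include <cstddef>

    enum class Space { kNew, kOld, kCode, kReadOnly };

    struct AllocationResult {
      void* address = nullptr;           // nullptr means the allocation failed
      bool To(void** out) const {        // mirrors AllocationResult::To(&obj)
        if (address == nullptr) return false;
        *out = address;
        return true;
      }
    };

    class SpaceSketch {
     public:
      virtual ~SpaceSketch() = default;
      virtual AllocationResult AllocateRaw(int size_in_bytes) = 0;
    };

    class HeapSketch {
     public:
      HeapSketch(SpaceSketch* new_space, SpaceSketch* old_space,
                 SpaceSketch* code_space, SpaceSketch* read_only_space,
                 SpaceSketch* lo_space)
          : new_space_(new_space), old_space_(old_space), code_space_(code_space),
            read_only_space_(read_only_space), lo_space_(lo_space) {}

      AllocationResult AllocateRaw(int size_in_bytes, Space space) {
        // Objects above the regular-object limit go to the large-object space.
        if (size_in_bytes > kMaxRegularObjectSize) {
          return lo_space_->AllocateRaw(size_in_bytes);
        }
        switch (space) {
          case Space::kNew:      return new_space_->AllocateRaw(size_in_bytes);
          case Space::kOld:      return old_space_->AllocateRaw(size_in_bytes);
          case Space::kCode:     return code_space_->AllocateRaw(size_in_bytes);
          case Space::kReadOnly: return read_only_space_->AllocateRaw(size_in_bytes);
        }
        return AllocationResult{};
      }

     private:
      static constexpr int kMaxRegularObjectSize = 128 * 1024;  // placeholder limit
      SpaceSketch* new_space_;
      SpaceSketch* old_space_;
      SpaceSketch* code_space_;
      SpaceSketch* read_only_space_;
      SpaceSketch* lo_space_;
    };

The To() helper mirrors the caller-side pattern visible in the setup-heap-internal.cc entry below, e.g. "if (!AllocateRaw(size, RO_SPACE).To(&obj)) return false;", where a failed allocation aborts the current setup step.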
|
D | setup-heap-internal.cc |
        103  AllocateRaw(Map::kSize, is_js_object ? MAP_SPACE : RO_SPACE);   in AllocateMap()
        118  AllocationResult allocation = AllocateRaw(Map::kSize, RO_SPACE);   in AllocatePartialMap()
        163  AllocationResult allocation = AllocateRaw(size, space);   in Allocate()
        177  AllocationResult allocation = AllocateRaw(   in AllocateEmptyFixedTypedArray()
        235  AllocationResult alloc = AllocateRaw(FixedArray::SizeFor(0), RO_SPACE);   in CreateInitialMaps()
        243  AllocationResult alloc = AllocateRaw(WeakFixedArray::SizeFor(0), RO_SPACE);   in CreateInitialMaps()
        253  AllocateRaw(WeakArrayList::SizeForCapacity(0), RO_SPACE);   in CreateInitialMaps()
        308  if (!AllocateRaw(size, RO_SPACE).To(&obj)) return false;   in CreateInitialMaps()
        425  AllocationResult alloc = AllocateRaw(Cell::kSize, OLD_SPACE);   in CreateInitialMaps()
        505  AllocationResult alloc = AllocateRaw(FixedArray::SizeFor(0), RO_SPACE);   in CreateInitialMaps()
        [all …]
|
D | local-allocator-inl.h |
        22   return compaction_spaces_.Get(OLD_SPACE)->AllocateRaw(object_size,   in Allocate()
        26   ->AllocateRaw(object_size, alignment);   in Allocate()
|
/external/v8/src/snapshot/ |
D | default-deserializer-allocator.cc |
        30   Address DefaultDeserializerAllocator::AllocateRaw(AllocationSpace space,   in AllocateRaw() function in v8::internal::DefaultDeserializerAllocator
        38   AllocationResult result = lo_space->AllocateRaw(size, exec);   in AllocateRaw()
        68   address = AllocateRaw(space, reserved);   in Allocate()
        82   return AllocateRaw(space, size);   in Allocate()
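DefaultDeserializerAllocator::AllocateRaw returns a raw Address inside a space and, as in heap-inl.h, sends oversized objects to the large-object space. Roughly speaking it hands out addresses from memory reserved up front for the snapshot; the bump-pointer sketch below illustrates only that carving-out step, with ReservedChunk and BumpAllocate as invented names and no alignment or multi-chunk handling beyond a simple limit check.

    #include <cstddef>
    #include <cstdint>

    struct ReservedChunk {
      uintptr_t current;  // next free address in the reservation
      uintptr_t limit;    // one past the end of the reservation
    };

    // Carves `size` bytes out of a pre-reserved chunk; returns 0 when the chunk
    // is exhausted and a fresh reservation (or the large-object path) is needed.
    uintptr_t BumpAllocate(ReservedChunk* chunk, size_t size) {
      if (chunk->limit - chunk->current < size) return 0;
      uintptr_t address = chunk->current;
      chunk->current += size;
      return address;
    }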
|