/external/jemalloc_new/test/unit/
slab.c
    9     const bin_info_t *bin_info = &bin_infos[binind];    in TEST_BEGIN() local
    10    extent_init(&slab, NULL, mallocx(bin_info->slab_size,    in TEST_BEGIN()
    11    MALLOCX_LG_ALIGN(LG_PAGE)), bin_info->slab_size, true,    in TEST_BEGIN()
    15    for (regind = 0; regind < bin_info->nregs; regind++) {    in TEST_BEGIN()
    17    (bin_info->reg_size * regind));    in TEST_BEGIN()
    21    bin_info->reg_size);    in TEST_BEGIN()
junk.c
    18    arena_dalloc_junk_small_intercept(void *ptr, const bin_info_t *bin_info) {    in arena_dalloc_junk_small_intercept() argument
    21    arena_dalloc_junk_small_orig(ptr, bin_info);    in arena_dalloc_junk_small_intercept()
    22    for (i = 0; i < bin_info->reg_size; i++) {    in arena_dalloc_junk_small_intercept()
    25    i, bin_info->reg_size);    in arena_dalloc_junk_small_intercept()
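The two jemalloc unit tests above exercise the per-size-class bin_info_t: slab.c checks that region regind of a slab sits at the slab base plus reg_size * regind, and junk.c verifies that freeing a small region overwrites all reg_size bytes with the junk pattern. A minimal standalone sketch of both checks follows; the struct is a cut-down stand-in for bin_info_t with only the fields visible in these hits, and 0x5a is assumed to be jemalloc's small-dealloc junk byte.

    #include <assert.h>
    #include <stddef.h>
    #include <stdint.h>

    /* Cut-down stand-in for jemalloc's bin_info_t; only the fields used in
     * the slab.c / junk.c hits above are kept. */
    typedef struct {
        size_t reg_size;    /* size of one region (allocation slot) */
        size_t slab_size;   /* size of the slab backing the whole bin */
        uint32_t nregs;     /* regions packed into one slab */
    } slab_bin_info_t;

    /* slab.c, lines 15-17: region regind lives at a fixed offset in the slab. */
    void *
    region_addr(void *slab_base, const slab_bin_info_t *bi, uint32_t regind) {
        assert(regind < bi->nregs);
        return (void *)((uintptr_t)slab_base + (uintptr_t)(bi->reg_size * regind));
    }

    /* junk.c, lines 22-25: after a small free, every byte of the region should
     * carry the junk pattern (assumed here to be the 0x5a free-junk byte). */
    int
    region_is_junk_filled(const void *ptr, const slab_bin_info_t *bi) {
        const unsigned char *p = (const unsigned char *)ptr;
        for (size_t i = 0; i < bi->reg_size; i++) {
            if (p[i] != 0x5a) {
                return 0;
            }
        }
        return 1;
    }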
/external/jemalloc_new/include/jemalloc/internal/
tcache_inlines.h
    171   cache_bin_info_t *bin_info;    in tcache_dalloc_small() local
    180   bin_info = &tcache_bin_info[binind];    in tcache_dalloc_small()
    181   if (unlikely(bin->ncached == bin_info->ncached_max)) {    in tcache_dalloc_small()
    183   (bin_info->ncached_max >> 1));    in tcache_dalloc_small()
    185   assert(bin->ncached < bin_info->ncached_max);    in tcache_dalloc_small()
    196   cache_bin_info_t *bin_info;    in tcache_dalloc_large() local
    206   bin_info = &tcache_bin_info[binind];    in tcache_dalloc_large()
    207   if (unlikely(bin->ncached == bin_info->ncached_max)) {    in tcache_dalloc_large()
    209   (bin_info->ncached_max >> 1), tcache);    in tcache_dalloc_large()
    211   assert(bin->ncached < bin_info->ncached_max);    in tcache_dalloc_large()
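Both tcache_dalloc_small() and tcache_dalloc_large() follow the same fast path: look up the cache_bin_info_t for the size class, and if the thread-cache bin already holds ncached_max pointers, flush half of it before caching the freed pointer. A rough standalone model of that shape is below; the structs and flush_down_to() are simplified placeholders, not jemalloc's real cache_bin_t API, and jemalloc manages the avail stack differently.

    /* Simplified placeholders for jemalloc's cache_bin_info_t / cache_bin_t. */
    typedef struct {
        unsigned ncached_max;   /* capacity of this thread-cache bin */
    } tc_bin_info_t;

    typedef struct {
        unsigned ncached;       /* pointers currently cached */
        void **avail;           /* storage for cached pointers */
    } tc_bin_t;

    /* Stand-in for tcache_bin_flush_small()/_large(): in this model, flushing
     * simply drops everything beyond 'rem' cached pointers. */
    static void
    flush_down_to(tc_bin_t *bin, unsigned rem) {
        bin->ncached = rem;
    }

    /* Shape of the deallocation fast path in the hits above. */
    void
    dalloc_cached(tc_bin_t *bin, const tc_bin_info_t *bi, void *ptr) {
        if (bin->ncached == bi->ncached_max) {
            /* bin is full: return half of it to the arena, keep half cached */
            flush_down_to(bin, bi->ncached_max >> 1);
        }
        bin->avail[bin->ncached++] = ptr;   /* cache the freed pointer */
    }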
arena_externs.h
    49    void arena_alloc_junk_small(void *ptr, const bin_info_t *bin_info,
/external/tensorflow/tensorflow/core/common_runtime/gpu/
gpu_bfc_allocator_test.cc
    466   const BFCAllocator::BinDebugInfo& bin_info = bin_infos[i];    in TestBinDebugInfo() local
    475   EXPECT_EQ(bin_info.total_bytes_in_use, allocated_size);    in TestBinDebugInfo()
    477   EXPECT_EQ(bin_info.total_bytes_in_bin, allocated_size);    in TestBinDebugInfo()
    479   EXPECT_EQ(bin_info.total_requested_bytes_in_use, requested_size);    in TestBinDebugInfo()
    480   EXPECT_EQ(bin_info.total_chunks_in_use, kNumChunksPerSize);    in TestBinDebugInfo()
    482   EXPECT_EQ(bin_info.total_chunks_in_bin, kNumChunksPerSize);    in TestBinDebugInfo()
    485   EXPECT_EQ(bin_info.total_bytes_in_use, 0);    in TestBinDebugInfo()
    487   EXPECT_EQ(bin_info.total_requested_bytes_in_use, 0);    in TestBinDebugInfo()
    488   EXPECT_EQ(bin_info.total_chunks_in_use, 0);    in TestBinDebugInfo()
    491   EXPECT_GT(bin_info.total_bytes_in_bin, 0);    in TestBinDebugInfo()
    [all …]
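TestBinDebugInfo() checks BFCAllocator's per-bin accounting: while chunks are allocated, a bin's in-use byte and chunk counters match what was handed out; after freeing, the in-use counters drop to zero while total_bytes_in_bin stays positive because the freed chunks remain in the bin. The C sketch below models how such counters relate to a bin's chunks; the struct and chunk record are illustrative stand-ins, not TensorFlow's types.

    #include <stddef.h>

    /* Illustrative counterpart of BFCAllocator::BinDebugInfo, with the fields
     * the test above asserts on. */
    typedef struct {
        size_t total_bytes_in_use;
        size_t total_bytes_in_bin;
        size_t total_requested_bytes_in_use;
        size_t total_chunks_in_use;
        size_t total_chunks_in_bin;
    } bin_debug_info_t;

    /* Hypothetical chunk record; BFCAllocator stores chunks differently. */
    typedef struct {
        size_t size;            /* chunk size, >= requested_size */
        size_t requested_size;  /* bytes the caller asked for */
        int in_use;
    } chunk_t;

    /* All chunks count toward the *_in_bin totals; only live chunks count
     * toward the *_in_use totals, which is why freeing everything zeroes the
     * in-use counters but leaves total_bytes_in_bin > 0. */
    bin_debug_info_t
    summarize_bin(const chunk_t *chunks, size_t nchunks) {
        bin_debug_info_t info = {0, 0, 0, 0, 0};
        for (size_t i = 0; i < nchunks; i++) {
            info.total_bytes_in_bin += chunks[i].size;
            info.total_chunks_in_bin++;
            if (chunks[i].in_use) {
                info.total_bytes_in_use += chunks[i].size;
                info.total_requested_bytes_in_use += chunks[i].requested_size;
                info.total_chunks_in_use++;
            }
        }
        return info;
    }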
/external/jemalloc_new/src/
android_je_iterate.c
    49    const bin_info_t* bin_info = &bin_infos[binind];    in je_malloc_iterate() local
    53    size_t bin_size = bin_info->reg_size;    in je_malloc_iterate()
    56    for (size_t bit = (ptr - first_ptr) / bin_size; bit < bin_info->bitmap_info.nbits; bit++) {    in je_malloc_iterate()
    57    if (bitmap_get(slab_data->bitmap, &bin_info->bitmap_info, bit)) {    in je_malloc_iterate()
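je_malloc_iterate() walks a slab's live small allocations by scanning the slab's occupancy bitmap: the starting bit comes from the pointer offset divided by reg_size, and each set bit names one live region. A simplified version follows, using a plain word array instead of jemalloc's bitmap_t/bitmap_info_t; the callback signature is made up for the example.

    #include <stddef.h>
    #include <stdint.h>

    typedef void (*region_cb_t)(uintptr_t addr, size_t size, void *arg);

    /* Report every live region of a slab. 'bitmap' has one bit per region;
     * in this model a set bit means the region is allocated. */
    void
    iterate_slab(uintptr_t first_ptr, size_t reg_size, size_t nregs,
        const unsigned long *bitmap, region_cb_t cb, void *arg) {
        const size_t bits_per_word = sizeof(unsigned long) * 8;
        for (size_t bit = 0; bit < nregs; bit++) {
            if ((bitmap[bit / bits_per_word] >> (bit % bits_per_word)) & 1UL) {
                cb(first_ptr + bit * reg_size, reg_size, arg);
            }
        }
    }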
arena.c
    224   arena_slab_reg_alloc(extent_t *slab, const bin_info_t *bin_info) {    in arena_slab_reg_alloc() argument
    230   assert(!bitmap_full(slab_data->bitmap, &bin_info->bitmap_info));    in arena_slab_reg_alloc()
    232   regind = bitmap_sfu(slab_data->bitmap, &bin_info->bitmap_info);    in arena_slab_reg_alloc()
    234   (uintptr_t)(bin_info->reg_size * regind));    in arena_slab_reg_alloc()
    266   const bin_info_t *bin_info = &bin_infos[binind];    in arena_slab_reg_dalloc() local
    269   assert(extent_nfree_get(slab) < bin_info->nregs);    in arena_slab_reg_dalloc()
    271   assert(bitmap_get(slab_data->bitmap, &bin_info->bitmap_info, regind));    in arena_slab_reg_dalloc()
    273   bitmap_unset(slab_data->bitmap, &bin_info->bitmap_info, regind);    in arena_slab_reg_dalloc()
    1093  extent_hooks_t **r_extent_hooks, const bin_info_t *bin_info,    in arena_slab_alloc_hard() argument
    1104  bin_info->slab_size, 0, PAGE, true, szind, &zero, &commit);    in arena_slab_alloc_hard()
    [all …]
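arena_slab_reg_alloc() and arena_slab_reg_dalloc() are the two directions of the same mapping: allocation takes the first free bit from the slab bitmap (bitmap_sfu) and turns that region index into an address via reg_size * regind, while deallocation recovers the index from the pointer and clears the bit (bitmap_unset). The index recovery amounts to the division below; plain division is used for clarity, whereas jemalloc itself avoids the runtime divide with precomputed division info.

    #include <assert.h>
    #include <stddef.h>
    #include <stdint.h>

    /* Inverse of region_addr(): which region of the slab does 'ptr' belong to? */
    size_t
    region_index(uintptr_t slab_base, size_t reg_size, size_t nregs, uintptr_t ptr) {
        size_t regind;
        assert(ptr >= slab_base);
        regind = (ptr - slab_base) / reg_size;   /* real code avoids this divide */
        assert(regind < nregs);
        return regind;
    }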
/external/tensorflow/tensorflow/core/common_runtime/
bfc_allocator.cc
    991   const BinDebugInfo& bin_info = bin_infos[bin_num];    in DumpMemoryLog() local
    993   bin_info.total_chunks_in_bin - bin_info.total_chunks_in_use);    in DumpMemoryLog()
    996   << "): \tTotal Chunks: " << bin_info.total_chunks_in_bin    in DumpMemoryLog()
    997   << ", Chunks in use: " << bin_info.total_chunks_in_use << ". "    in DumpMemoryLog()
    998   << strings::HumanReadableNumBytes(bin_info.total_bytes_in_bin)    in DumpMemoryLog()
    1000  << strings::HumanReadableNumBytes(bin_info.total_bytes_in_use)    in DumpMemoryLog()
    1003  bin_info.total_requested_bytes_in_use)    in DumpMemoryLog()
    1106  const BinDebugInfo& bin_info = bin_infos[bin_num];    in RecordMemoryMapInternal() local
    1108  bin_info.total_chunks_in_bin - bin_info.total_chunks_in_use);    in RecordMemoryMapInternal()
    1111  bs->set_total_bytes_in_use(bin_info.total_bytes_in_use);    in RecordMemoryMapInternal()
    [all …]
/external/webp/src/enc/
histogram_enc.c
    659   } bin_info[BIN_SIZE];    in HistogramCombineEntropyBin() local
    663   bin_info[idx].first = -1;    in HistogramCombineEntropyBin()
    664   bin_info[idx].num_combine_failures = 0;    in HistogramCombineEntropyBin()
    673   first = bin_info[bin_id].first;    in HistogramCombineEntropyBin()
    675   bin_info[bin_id].first = idx;    in HistogramCombineEntropyBin()
    699   bin_info[bin_id].num_combine_failures >= max_combine_failures) {    in HistogramCombineEntropyBin()
    705   ++bin_info[bin_id].num_combine_failures;    in HistogramCombineEntropyBin()
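In HistogramCombineEntropyBin(), the local bin_info[] array is bookkeeping for each entropy bin: 'first' remembers the first histogram assigned to the bin (-1 while empty) and 'num_combine_failures' counts merges that did not pay off, with max_combine_failures bounding how long a bin keeps rejecting merges. The sketch below paraphrases that decision; the struct, the callback parameters, and the exact accept/force policy are assumptions, not copied from libwebp.

    /* Reduced version of the per-bin bookkeeping declared at line 659 above;
     * the real struct may use narrower integer types. */
    typedef struct {
        int first;                  /* first histogram in this bin, -1 if empty */
        int num_combine_failures;   /* failed merge attempts for this bin */
    } entropy_bin_info_t;

    enum { NUM_BINS = 64 };         /* stand-in for the real BIN_SIZE */

    /* Decide what to do with histogram 'idx' that hashed to 'bin_id': remember
     * it as the bin's representative if the bin is empty; otherwise merge it
     * into the representative when the merge lowers cost, or unconditionally
     * once the bin has already failed max_combine_failures times; otherwise
     * count one more failure. */
    void
    place_histogram(entropy_bin_info_t bins[NUM_BINS], int bin_id, int idx,
        int max_combine_failures,
        int (*merge_pays_off)(int first_idx, int idx),
        void (*do_merge)(int first_idx, int idx)) {
        if (bins[bin_id].first == -1) {
            bins[bin_id].first = idx;
        } else if (merge_pays_off(bins[bin_id].first, idx) ||
                   bins[bin_id].num_combine_failures >= max_combine_failures) {
            do_merge(bins[bin_id].first, idx);
        } else {
            ++bins[bin_id].num_combine_failures;
        }
    }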