/external/jemalloc/include/jemalloc/internal/
D | valgrind.h |
    16  #define JEMALLOC_VALGRIND_MAKE_MEM_NOACCESS(ptr, usize) do { \    argument
    18      valgrind_make_mem_noaccess(ptr, usize); \
    20  #define JEMALLOC_VALGRIND_MAKE_MEM_UNDEFINED(ptr, usize) do { \    argument
    22      valgrind_make_mem_undefined(ptr, usize); \
    24  #define JEMALLOC_VALGRIND_MAKE_MEM_DEFINED(ptr, usize) do { \    argument
    26      valgrind_make_mem_defined(ptr, usize); \
    33  #define JEMALLOC_VALGRIND_MALLOC(cond, ptr, usize, zero) do { \    argument
    35      VALGRIND_MALLOCLIKE_BLOCK(ptr, usize, p2rz(ptr), zero); \
    37  #define JEMALLOC_VALGRIND_REALLOC(maybe_moved, ptr, usize, \    argument
    45      usize, rzsize); \
    [all …]
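These JEMALLOC_VALGRIND_* macros wrap Valgrind client requests so they cost nothing when the allocator is not running under Valgrind. A minimal sketch of the pattern for one macro, assuming a simplified runtime flag; guard and helper names follow what the matches above show, but the exact conditions are abbreviated:

/*
 * Sketch of the wrapper pattern (simplified): with Valgrind support
 * compiled in, the macro tests a runtime "are we under Valgrind" flag
 * before calling the helper seen in valgrind.c below; otherwise it
 * expands to an empty statement.
 */
#ifdef JEMALLOC_VALGRIND
#include <valgrind/memcheck.h>
#define JEMALLOC_VALGRIND_MAKE_MEM_DEFINED(ptr, usize) do {     \
        if (unlikely(in_valgrind))                              \
                valgrind_make_mem_defined(ptr, usize);          \
} while (0)
#else
#define JEMALLOC_VALGRIND_MAKE_MEM_DEFINED(ptr, usize) do {} while (0)
#endif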
|
D | prof.h |
    284  void prof_malloc_sample_object(const void *ptr, size_t usize,
    286  void prof_free_sampled_object(tsd_t *tsd, size_t usize, prof_tctx_t *tctx);
    332  bool prof_sample_accum_update(tsd_t *tsd, size_t usize, bool commit,
    334  prof_tctx_t *prof_alloc_prep(tsd_t *tsd, size_t usize, bool prof_active,
    337  void prof_tctx_set(const void *ptr, size_t usize, prof_tctx_t *tctx);
    338  void prof_tctx_reset(const void *ptr, size_t usize, const void *old_ptr,
    340  void prof_malloc_sample_object(const void *ptr, size_t usize,
    342  void prof_malloc(const void *ptr, size_t usize, prof_tctx_t *tctx);
    343  void prof_realloc(tsd_t *tsd, const void *ptr, size_t usize,
    346  void prof_free(tsd_t *tsd, const void *ptr, size_t usize);
    [all …]
|
D | jemalloc_internal.h |
    628      size_t usize = grp_size + mod_size;    in index2size_compute() local
    629      return (usize);    in index2size_compute()
    669      size_t usize = (size + delta_mask) & ~delta_mask;    in s2u_compute() local
    670      return (usize);    in s2u_compute()
    704      size_t usize;    in sa2u() local
    724      usize = s2u(ALIGNMENT_CEILING(size, alignment));    in sa2u()
    725      if (usize < LARGE_MINCLASS)    in sa2u()
    726          return (usize);    in sa2u()
    738      usize = (size <= LARGE_MINCLASS) ? LARGE_MINCLASS : s2u(size);    in sa2u()
    744      if (usize + large_pad + alignment - PAGE <= arena_maxrun)    in sa2u()
    [all …]
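The expression at line 669 is the core of jemalloc's size-class rounding: within a group whose classes are spaced by a power-of-two delta, rounding a request up to the next class is a single mask operation. A standalone sketch (the helper name is hypothetical):

#include <assert.h>
#include <stddef.h>

/*
 * Hypothetical standalone version of the rounding at line 669: round
 * `size` up to the next multiple of `delta`, which must be a power of
 * two (the spacing between size classes within one group).
 */
static size_t
round_up_to_class(size_t size, size_t delta)
{
        size_t delta_mask = delta - 1;

        return ((size + delta_mask) & ~delta_mask);
}

int
main(void)
{
        assert(round_up_to_class(33, 16) == 48);        /* rounds up */
        assert(round_up_to_class(48, 16) == 48);        /* already aligned */
        return (0);
}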
|
D | tcache.h |
    300  size_t usize JEMALLOC_CC_SILENCE_INIT(0);    in tcache_alloc_small()
    324  usize = index2size(binind);    in tcache_alloc_small()
    325  assert(tcache_salloc(ret) == usize);    in tcache_alloc_small()
    334  memset(ret, 0, usize);    in tcache_alloc_small()
    341  memset(ret, 0, usize);    in tcache_alloc_small()
    347  tcache->prof_accumbytes += usize;    in tcache_alloc_small()
    377  size_t usize JEMALLOC_CC_SILENCE_INIT(0);    in tcache_alloc_large()
    382  usize = index2size(binind);    in tcache_alloc_large()
    383  assert(usize <= tcache_maxclass);    in tcache_alloc_large()
    386  if (config_prof && usize == LARGE_MINCLASS) {    in tcache_alloc_large()
    [all …]
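The two memset() hits above are the zero/junk decision on the small-allocation fast path: an object popped from the thread cache must not expose stale contents. A condensed sketch, assuming jemalloc's opt_junk_alloc option name and 0xa5 alloc-junk byte (the 0xa5 value also appears in huge.c below); surrounding control flow is simplified:

/*
 * Condensed sketch of the fill logic in tcache_alloc_small(): zero the
 * object when the caller asked for zeroed memory, otherwise junk-fill
 * it when junk filling is enabled.
 */
if (likely(!zero)) {
        if (config_fill && unlikely(opt_junk_alloc))
                memset(ret, 0xa5, usize);       /* alloc-side junk byte */
} else
        memset(ret, 0, usize);                  /* caller requested zeroing */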
|
D | jemalloc_internal.h.in |
    628      size_t usize = grp_size + mod_size;    local
    629      return (usize);
    669      size_t usize = (size + delta_mask) & ~delta_mask;    local
    670      return (usize);
    704      size_t usize;    local
    724      usize = s2u(ALIGNMENT_CEILING(size, alignment));
    725      if (usize < LARGE_MINCLASS)
    726          return (usize);
    738      usize = (size <= LARGE_MINCLASS) ? LARGE_MINCLASS : s2u(size);
    744      if (usize + large_pad + alignment - PAGE <= arena_maxrun)
    [all …]
|
D | huge.h |
    12  void *huge_malloc(tsd_t *tsd, arena_t *arena, size_t usize, bool zero,
    14  void *huge_palloc(tsd_t *tsd, arena_t *arena, size_t usize, size_t alignment,
    19      size_t usize, size_t alignment, bool zero, tcache_t *tcache);
|
D | arena.h |
    515  void *arena_chunk_alloc_huge(arena_t *arena, size_t usize, size_t alignment,
    517  void arena_chunk_dalloc_huge(arena_t *arena, void *chunk, size_t usize);
    519      size_t oldsize, size_t usize);
    521      size_t oldsize, size_t usize);
    523      size_t oldsize, size_t usize, bool *zero);
    543  void arena_quarantine_junk_small(void *ptr, size_t usize);
    547  void *arena_palloc(tsd_t *tsd, arena_t *arena, size_t usize,
    560  void arena_dalloc_junk_large(void *ptr, size_t usize);
    648  void arena_prof_tctx_set(const void *ptr, size_t usize, prof_tctx_t *tctx);
    649  void arena_prof_tctx_reset(const void *ptr, size_t usize,
    [all …]
|
D | quarantine.h |
    16      size_t usize;    member
|
/external/jemalloc/src/ |
D | huge.c |
    34   huge_malloc(tsd_t *tsd, arena_t *arena, size_t usize, bool zero,    in huge_malloc() argument
    38   assert(usize == s2u(usize));    in huge_malloc()
    40   return (huge_palloc(tsd, arena, usize, chunksize, zero, tcache));    in huge_malloc()
    44   huge_palloc(tsd_t *tsd, arena_t *arena, size_t usize, size_t alignment,    in huge_palloc() argument
    54   ausize = sa2u(usize, alignment);    in huge_palloc()
    83   usize, alignment, &is_zeroed)) == NULL) {    in huge_palloc()
    88   extent_node_init(node, arena, ret, usize, is_zeroed, true);    in huge_palloc()
    91   arena_chunk_dalloc_huge(arena, ret, usize);    in huge_palloc()
    104  memset(ret, 0, usize);    in huge_palloc()
    106  memset(ret, 0xa5, usize);    in huge_palloc()
    [all …]
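The assert at line 38 documents huge_malloc()'s contract: the caller passes a size already rounded to a size class. A hypothetical call site showing the expected preparation, treating a zero return from s2u() as overflow (mirroring the zero checks after sa2u() in ckh.c below):

/*
 * Hypothetical call site for huge_malloc(): round the request with
 * s2u() first, so the callee's assert(usize == s2u(usize)) holds.
 */
size_t usize = s2u(size);
if (unlikely(usize == 0))
        return (NULL);          /* request overflowed the size classes */
ret = huge_malloc(tsd, arena, usize, zero, tcache);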
|
D | jemalloc.c |
    1419  imalloc_prof_sample(tsd_t *tsd, size_t usize, szind_t ind,    argument
    1426  if (usize <= SMALL_MAXCLASS) {
    1431  arena_prof_promoted(p, usize);
    1433  p = imalloc(tsd, usize, ind, slow_path);
    1439  imalloc_prof(tsd_t *tsd, size_t usize, szind_t ind, bool slow_path)    argument
    1444  tctx = prof_alloc_prep(tsd, usize, prof_active_get_unlocked(), true);
    1446  p = imalloc_prof_sample(tsd, usize, ind, tctx, slow_path);
    1448  p = imalloc(tsd, usize, ind, slow_path);
    1453  prof_malloc(p, usize, tctx);
    1459  imalloc_body(size_t size, tsd_t **tsd, size_t *usize, bool slow_path)    argument
    [all …]
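imalloc_prof_sample() shows the sampling trick visible at lines 1426-1431: a sampled small allocation is served from LARGE_MINCLASS so per-object profiling metadata has somewhere to live, and arena_prof_promoted() then records the true usable size. A condensed sketch of that branch, with control flow simplified:

/*
 * Condensed sketch of imalloc_prof_sample(): promote sampled small
 * requests to LARGE_MINCLASS, then record the original usize so stats
 * stay accurate; non-small requests take the normal path.
 */
if (usize <= SMALL_MAXCLASS) {
        p = imalloc(tsd, LARGE_MINCLASS, size2index(LARGE_MINCLASS),
            slow_path);
        if (p == NULL)
                return (NULL);
        arena_prof_promoted(p, usize);
} else
        p = imalloc(tsd, usize, ind, slow_path);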
|
D | valgrind.c |
    9   valgrind_make_mem_noaccess(void *ptr, size_t usize)    in valgrind_make_mem_noaccess() argument
    12  VALGRIND_MAKE_MEM_NOACCESS(ptr, usize);    in valgrind_make_mem_noaccess()
    16  valgrind_make_mem_undefined(void *ptr, size_t usize)    in valgrind_make_mem_undefined() argument
    19  VALGRIND_MAKE_MEM_UNDEFINED(ptr, usize);    in valgrind_make_mem_undefined()
    23  valgrind_make_mem_defined(void *ptr, size_t usize)    in valgrind_make_mem_defined() argument
    26  VALGRIND_MAKE_MEM_DEFINED(ptr, usize);    in valgrind_make_mem_defined()
    30  valgrind_freelike_block(void *ptr, size_t usize)    in valgrind_freelike_block() argument
    33  VALGRIND_FREELIKE_BLOCK(ptr, usize);    in valgrind_freelike_block()
|
D | quarantine.c |
    102  assert(obj->usize == isalloc(obj->ptr, config_prof));    in quarantine_drain_one()
    104  quarantine->curbytes -= obj->usize;    in quarantine_drain_one()
    122  size_t usize = isalloc(ptr, config_prof);    in quarantine() local
    135  if (quarantine->curbytes + usize > opt_quarantine) {    in quarantine()
    136  size_t upper_bound = (opt_quarantine >= usize) ? opt_quarantine    in quarantine()
    137  - usize : 0;    in quarantine()
    146  if (quarantine->curbytes + usize <= opt_quarantine) {    in quarantine()
    151  obj->usize = usize;    in quarantine()
    152  quarantine->curbytes += usize;    in quarantine()
    160  && usize <= SMALL_MAXCLASS)    in quarantine()
    [all …]
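The quarantine() hits trace a simple byte budget: drain old objects until the new one fits under opt_quarantine, then enqueue it and charge its usable size. A condensed sketch of lines 135-152, with the FIFO mechanics elided:

/*
 * Condensed sketch of the accounting in quarantine(): make room by
 * draining, then enqueue the new object and charge curbytes.
 */
size_t usize = isalloc(ptr, config_prof);
if (quarantine->curbytes + usize > opt_quarantine) {
        size_t upper_bound = (opt_quarantine >= usize) ?
            opt_quarantine - usize : 0;
        while (quarantine->curbytes > upper_bound)
                quarantine_drain_one(tsd, quarantine);
}
if (quarantine->curbytes + usize <= opt_quarantine) {
        obj->ptr = ptr;                 /* enqueue at the FIFO head */
        obj->usize = usize;
        quarantine->curbytes += usize;
}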
|
D | arena.c |
    799  arena_huge_malloc_stats_update(arena_t *arena, size_t usize)    in arena_huge_malloc_stats_update() argument
    801  szind_t index = size2index(usize) - nlclasses - NBINS;    in arena_huge_malloc_stats_update()
    806  arena->stats.allocated_huge += usize;    in arena_huge_malloc_stats_update()
    812  arena_huge_malloc_stats_update_undo(arena_t *arena, size_t usize)    in arena_huge_malloc_stats_update_undo() argument
    814  szind_t index = size2index(usize) - nlclasses - NBINS;    in arena_huge_malloc_stats_update_undo()
    819  arena->stats.allocated_huge -= usize;    in arena_huge_malloc_stats_update_undo()
    825  arena_huge_dalloc_stats_update(arena_t *arena, size_t usize)    in arena_huge_dalloc_stats_update() argument
    827  szind_t index = size2index(usize) - nlclasses - NBINS;    in arena_huge_dalloc_stats_update()
    832  arena->stats.allocated_huge -= usize;    in arena_huge_dalloc_stats_update()
    838  arena_huge_dalloc_stats_update_undo(arena_t *arena, size_t usize)    in arena_huge_dalloc_stats_update_undo()
    [all …]
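The repeated `size2index(usize) - nlclasses - NBINS` at lines 801/814/827 converts a global size-class index into a zero-based slot for huge classes: classes are numbered small first (NBINS of them), then large (nlclasses), then huge. A sketch of the update, assuming jemalloc 4.x's hstats array and field names:

/*
 * Sketch of the stats update: subtract the small and large classes
 * that precede huge classes to index the per-arena huge stats array
 * (hstats and its fields are assumptions based on jemalloc 4.x).
 */
szind_t index = size2index(usize) - nlclasses - NBINS;

arena->stats.nmalloc_huge++;
arena->stats.allocated_huge += usize;
arena->stats.hstats[index].nmalloc++;
arena->stats.hstats[index].curhchunks++;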
|
D | ckh.c |
    265  size_t usize;    in ckh_grow() local
    268  usize = sa2u(sizeof(ckhc_t) << lg_curcells, CACHELINE);    in ckh_grow()
    269  if (unlikely(usize == 0 || usize > HUGE_MAXCLASS)) {    in ckh_grow()
    273  tab = (ckhc_t *)ipallocztm(tsd, usize, CACHELINE, true, NULL,    in ckh_grow()
    305  size_t usize;    in ckh_shrink() local
    314  usize = sa2u(sizeof(ckhc_t) << lg_curcells, CACHELINE);    in ckh_shrink()
    315  if (unlikely(usize == 0 || usize > HUGE_MAXCLASS))    in ckh_shrink()
    317  tab = (ckhc_t *)ipallocztm(tsd, usize, CACHELINE, true, NULL, true,    in ckh_shrink()
    354  size_t mincells, usize;    in ckh_new() local
    389  usize = sa2u(sizeof(ckhc_t) << lg_mincells, CACHELINE);    in ckh_new()
    [all …]
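Lines 268-269 show the standard overflow guard: sa2u() returns 0 when the padded, aligned size cannot be represented, and the caller additionally rejects anything above HUGE_MAXCLASS. A standalone sketch of the shift-overflow half of that check (helper name hypothetical):

#include <stddef.h>
#include <stdint.h>

/*
 * Hypothetical standalone version of the guard behind lines 268-269:
 * (elem_size << lg_cells) can wrap, so prove it fits before shifting;
 * return 0 to signal overflow, the same convention sa2u() uses.
 */
static size_t
table_bytes(size_t elem_size, unsigned lg_cells)
{
        if (lg_cells >= sizeof(size_t) * 8 ||
            elem_size > (SIZE_MAX >> lg_cells))
                return (0);             /* would overflow */
        return (elem_size << lg_cells);
}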
|
D | base.c |
    82  size_t csize, usize;    in base_alloc() local
    92  usize = s2u(csize);    in base_alloc()
    93  extent_node_init(&key, NULL, NULL, usize, false, false);    in base_alloc()
|
D | prof.c |
    226  prof_malloc_sample_object(const void *ptr, size_t usize, prof_tctx_t *tctx)    in prof_malloc_sample_object() argument
    229  prof_tctx_set(ptr, usize, tctx);    in prof_malloc_sample_object()
    233  tctx->cnts.curbytes += usize;    in prof_malloc_sample_object()
    236  tctx->cnts.accumbytes += usize;    in prof_malloc_sample_object()
    243  prof_free_sampled_object(tsd_t *tsd, size_t usize, prof_tctx_t *tctx)    in prof_free_sampled_object() argument
    248  assert(tctx->cnts.curbytes >= usize);    in prof_free_sampled_object()
    250  tctx->cnts.curbytes -= usize;    in prof_free_sampled_object()
|
/external/jemalloc/test/unit/ |
D | junk.c |
    41   arena_dalloc_junk_large_intercept(void *ptr, size_t usize)    in arena_dalloc_junk_large_intercept() argument
    45   arena_dalloc_junk_large_orig(ptr, usize);    in arena_dalloc_junk_large_intercept()
    46   for (i = 0; i < usize; i++) {    in arena_dalloc_junk_large_intercept()
    49   i, usize);    in arena_dalloc_junk_large_intercept()
    56   huge_dalloc_junk_intercept(void *ptr, size_t usize)    in huge_dalloc_junk_intercept() argument
    59   huge_dalloc_junk_orig(ptr, usize);    in huge_dalloc_junk_intercept()
    171  arena_ralloc_junk_large_intercept(void *ptr, size_t old_usize, size_t usize)    in arena_ralloc_junk_large_intercept() argument
    174  arena_ralloc_junk_large_orig(ptr, old_usize, usize);    in arena_ralloc_junk_large_intercept()
    176  assert_zu_eq(usize, shrink_size(large_maxclass), "Unexpected usize");    in arena_ralloc_junk_large_intercept()
    203  arena_redzone_corruption_replacement(void *ptr, size_t usize, bool after,    in arena_redzone_corruption_replacement() argument
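junk.c validates junk filling by swapping the allocator's fill hooks for intercepts that call the saved original and then inspect every byte. A minimal sketch of that pattern; the typedef and the 0x5a free-junk constant follow jemalloc's conventions but are assumptions here:

#include <assert.h>
#include <stddef.h>
#include <stdint.h>

/* Hook type matching arena_dalloc_junk_large()'s signature. */
typedef void (junk_hook_t)(void *ptr, size_t usize);

static junk_hook_t *orig_hook;          /* saved original, called first */

/*
 * Sketch of the intercept pattern in junk.c: run the real junk filler,
 * then verify every byte carries the free-junk value (assumed 0x5a).
 */
static void
junk_check_intercept(void *ptr, size_t usize)
{
        size_t i;

        orig_hook(ptr, usize);
        for (i = 0; i < usize; i++)
                assert(((uint8_t *)ptr)[i] == 0x5a);
}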
|
D | quarantine.c |
    62  arena_redzone_corruption_replacement(void *ptr, size_t usize, bool after,    in arena_redzone_corruption_replacement() argument
|
/external/jemalloc/test/integration/ |
D | allocated.c |
    18  size_t sz, usize;    in thd_start() local
    69  usize = malloc_usable_size(p);    in thd_start()
    70  assert_u64_le(a0 + usize, a1,    in thd_start()
    86  assert_u64_le(d0 + usize, d1,    in thd_start()
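The assertions a0 + usize <= a1 rely on jemalloc's per-thread byte counters, read through mallctl(). A minimal runnable sketch of the same check, linked against jemalloc:

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <jemalloc/jemalloc.h>

/*
 * Minimal sketch of what allocated.c asserts: "thread.allocated" must
 * advance by at least the usable size of each allocation (it may
 * advance by more if other allocations happen in between).
 */
int
main(void)
{
        uint64_t a0, a1;
        size_t sz = sizeof(uint64_t);

        mallctl("thread.allocated", &a0, &sz, NULL, 0);
        void *p = malloc(100);
        size_t usize = malloc_usable_size(p);
        mallctl("thread.allocated", &a1, &sz, NULL, 0);
        printf("counter advanced by >= usize: %d\n", a0 + usize <= a1);
        free(p);
        return (0);
}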
|
/external/tpm2/ |
D | TpmFail.c |
    116  UINT32 usize;    in UnmarshalHeader() local
    119  || UINT32_Unmarshal(&usize, buffer, size) != TPM_RC_SUCCESS    in UnmarshalHeader()
    123  header->size = usize;    in UnmarshalHeader()
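UnmarshalHeader() pulls a 32-bit size field out of the command buffer with UINT32_Unmarshal(). A hypothetical standalone equivalent of that step: TPM marshaled integers are big-endian, and the parser must bounds-check before consuming bytes:

#include <stdint.h>

/*
 * Hypothetical standalone equivalent of the UINT32_Unmarshal() step:
 * verify four bytes remain, decode big-endian, and advance the cursor.
 * Returns 0 on success, -1 on buffer underrun.
 */
static int
u32_unmarshal(uint32_t *out, uint8_t **buffer, int32_t *size)
{
        uint8_t *b = *buffer;

        if (*size < 4)
                return (-1);            /* buffer underrun */
        *out = ((uint32_t)b[0] << 24) | ((uint32_t)b[1] << 16) |
            ((uint32_t)b[2] << 8) | (uint32_t)b[3];
        *buffer += 4;
        *size -= 4;
        return (0);
}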
|
D | part4.txt |
    19004  67  UINT32 usize;
    19007  70  || UINT32_Unmarshal(&usize, buffer, size) != TPM_RC_SUCCESS
    19011  74  header->size = usize;
|
/external/v8/src/runtime/ |
D | runtime-atomics.cc |
    662  uint32_t usize = NumberToUint32(*size);    in RUNTIME_FUNCTION() local
    663  return isolate->heap()->ToBoolean(AtomicIsLockFree(usize));    in RUNTIME_FUNCTION()
|
/external/elfutils/src/ |
D | readelf.c |
    4354  uint8_t usize = *(uint8_t *) data++;    in print_ops() local
    4355  NEED (usize);    in print_ops()
    4358  print_block (usize, data);    in print_ops()
    4359  data += usize;    in print_ops()
    4385  usize = *(uint8_t *) data++;    in print_ops()
    4392  op_name, usize, uleb);    in print_ops()
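In print_ops(), usize is a one-byte block-length prefix for a DWARF expression operand; the NEED() macro rejects input with fewer than usize bytes remaining before the block is printed and skipped. A hypothetical standalone version of that bounds-checked read:

#include <stddef.h>
#include <stdint.h>

/*
 * Hypothetical standalone version of the pattern at line 4354: read a
 * one-byte block length, verify the payload is fully present (the job
 * of readelf's NEED macro), and return a pointer past the block, or
 * NULL on truncated input.
 */
static const uint8_t *
skip_block(const uint8_t *data, const uint8_t *end)
{
        uint8_t usize;

        if (data >= end)
                return (NULL);          /* no room for the length byte */
        usize = *data++;
        if ((size_t)(end - data) < usize)
                return (NULL);          /* truncated block */
        return (data + usize);
}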
|
/external/opencv3/modules/core/src/ |
D | arithm.cpp |
    5566  Size ssize = _src.size(), lsize = _lowerb.size(), usize = _upperb.size();    in ocl_inRange() local
    5582  ssize != usize || stype != utype )    in ocl_inRange()
|