Lines Matching refs:nb
Every matching line references nb, the normalized (padded) request size in bytes that dlmalloc's internal routines pass around in place of the caller's raw byte count.
3842 static void* mmap_alloc(mstate m, size_t nb) { in mmap_alloc() argument
3843 size_t mmsize = mmap_align(nb + SIX_SIZE_T_SIZES + CHUNK_ALIGN_MASK); in mmap_alloc()
3849 if (mmsize > nb) { /* Check for wrap around 0 */ in mmap_alloc()
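
mmap_alloc() serves a request from its own mmapped region. Line 3843 pads nb with room for the chunk header and alignment before rounding up to the mmap granularity, and line 3849 rejects the request if that addition wrapped past zero. A standalone sketch of the same arithmetic, assuming a 4 KiB granularity and 16-byte chunk alignment in place of dlmalloc's real macros:

    #include <stddef.h>
    #include <stdio.h>

    #define PAGE_MASK        ((size_t)4096 - 1)  /* assumed mmap granularity */
    #define SIX_SIZE_T_SIZES (6 * sizeof(size_t))
    #define CHUNK_ALIGN_MASK ((size_t)15)        /* assumed chunk alignment */

    static size_t mmap_align_sketch(size_t s) {
      return (s + PAGE_MASK) & ~PAGE_MASK;       /* round up to the granularity */
    }

    int main(void) {
      size_t nb = (size_t)1 << 20;
      size_t mmsize = mmap_align_sketch(nb + SIX_SIZE_T_SIZES + CHUNK_ALIGN_MASK);
      if (mmsize > nb)                           /* line 3849's wrap-around check */
        printf("map %zu bytes for nb=%zu\n", mmsize, nb);
      else
        printf("padding wrapped around 0: fail the allocation\n");
      return 0;
    }
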
3874 static mchunkptr mmap_resize(mstate m, mchunkptr oldp, size_t nb, int flags) { in mmap_resize() argument
3877 if (is_small(nb)) /* Can't shrink mmap regions below small size */ in mmap_resize()
3880 if (oldsize >= nb + SIZE_T_SIZE && in mmap_resize()
3881 (oldsize - nb) <= (mparams.granularity << 1)) in mmap_resize()
3886 size_t newmmsize = mmap_align(nb + SIX_SIZE_T_SIZES + CHUNK_ALIGN_MASK); in mmap_resize()
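
mmap_resize() refuses to shrink an mmapped region into the small-chunk range (line 3877) and keeps the old region whenever it already covers nb with at most two allocation granules of slack (lines 3880-3881); only then is a new padded size computed (line 3886) and the region remapped. A sketch of the keep-or-remap predicate:

    #include <stddef.h>

    /* Nonzero when the existing mmapped region can be reused as-is.
     * Mirrors lines 3880-3881; granularity is a parameter here rather
     * than mparams.granularity. */
    static int keep_mmapped_region(size_t oldsize, size_t nb, size_t granularity) {
      return oldsize >= nb + sizeof(size_t) &&   /* big enough, plus header */
             oldsize - nb <= (granularity << 1); /* waste at most two granules */
    }
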
3957 size_t nb) { in prepend_alloc() argument
3961 mchunkptr q = chunk_plus_offset(p, nb); in prepend_alloc()
3962 size_t qsize = psize - nb; in prepend_alloc()
3963 set_size_and_pinuse_of_inuse_chunk(m, p, nb); in prepend_alloc()
3993 check_malloced_chunk(m, chunk2mem(p), nb); in prepend_alloc()
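
prepend_alloc() is reached from sys_alloc() when fresh memory lands directly below an existing segment: the new chunk p keeps nb bytes (line 3963), and the remainder q at offset nb, of size psize - nb, is handed back to top, dv, or a free bin. A sketch of that split, with mchunk standing in for dlmalloc's malloc_chunk:

    #include <stddef.h>

    typedef struct mchunk { size_t head; } mchunk;  /* stand-in for malloc_chunk */

    /* Split a psize-byte chunk at p: p keeps nb bytes; the returned q
     * (of *qsize bytes) is the remainder, as on lines 3961-3962. */
    static mchunk *split_at_nb(mchunk *p, size_t psize, size_t nb, size_t *qsize) {
      mchunk *q = (mchunk *)((char *)p + nb);       /* chunk_plus_offset(p, nb) */
      *qsize = psize - nb;
      return q;
    }
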
4053 static void* sys_alloc(mstate m, size_t nb) { in sys_alloc() argument
4062 if (use_mmap(m) && nb >= mparams.mmap_threshold && m->topsize != 0) { in sys_alloc()
4063 void* mem = mmap_alloc(m, nb); in sys_alloc()
4068 asize = granularity_align(nb + SYS_ALLOC_PADDING); in sys_alloc()
4069 if (asize <= nb) { in sys_alloc()
4121 if (ssize > nb && ssize < HALF_MAX_SIZE_T && in sys_alloc()
4132 ssize = granularity_align(nb - m->topsize + SYS_ALLOC_PADDING); in sys_alloc()
4144 ssize < nb + SYS_ALLOC_PADDING) { in sys_alloc()
4145 size_t esize = granularity_align(nb + SYS_ALLOC_PADDING - ssize); in sys_alloc()
4187 if (ssize > nb + TOP_FOOT_SIZE) { in sys_alloc()
4246 return prepend_alloc(m, tbase, oldbase, nb); in sys_alloc()
4253 if (nb < m->topsize) { /* Allocate from new or extended top space */ in sys_alloc()
4254 size_t rsize = m->topsize -= nb; in sys_alloc()
4256 mchunkptr r = m->top = chunk_plus_offset(p, nb); in sys_alloc()
4258 set_size_and_pinuse_of_inuse_chunk(m, p, nb); in sys_alloc()
4260 check_malloced_chunk(m, chunk2mem(p), nb); in sys_alloc()
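
sys_alloc() tries mmap first for large requests (lines 4062-4063), otherwise grows the heap by a granularity-aligned amount, checking every padded size for overflow (4068-4069, 4121, 4144-4145), and finally serves the request by splitting the new or extended top chunk (4253-4258). A toy version of that final split, with a static buffer standing in for the real segment:

    #include <stddef.h>
    #include <stdio.h>

    static unsigned char  arena[1 << 16];  /* stands in for the mapped segment */
    static unsigned char *top     = arena;
    static size_t         topsize = sizeof arena;

    /* Mirrors lines 4253-4258: carve nb bytes from the low end of top. */
    static void *alloc_from_top(size_t nb) {
      if (nb >= topsize)                   /* real code would extend the segment */
        return NULL;
      void *p = top;
      topsize -= nb;                       /* size_t rsize = m->topsize -= nb */
      top     += nb;                       /* r = m->top = chunk_plus_offset(p, nb) */
      return p;
    }

    int main(void) {
      void *p = alloc_from_top(128);
      printf("p=%p, %zu bytes left in top\n", p, topsize);
      return 0;
    }
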
4458 static void* tmalloc_large(mstate m, size_t nb) { in tmalloc_large() argument
4460 size_t rsize = -nb; /* Unsigned negation */ in tmalloc_large()
4463 compute_tree_index(nb, idx); in tmalloc_large()
4466 size_t sizebits = nb << leftshift_for_tree_index(idx); in tmalloc_large()
4470 size_t trem = chunksize(t) - nb; in tmalloc_large()
4498 size_t trem = chunksize(t) - nb; in tmalloc_large()
4507 if (v != 0 && rsize < (size_t)(m->dvsize - nb)) { in tmalloc_large()
4509 mchunkptr r = chunk_plus_offset(v, nb); in tmalloc_large()
4510 assert(chunksize(v) == rsize + nb); in tmalloc_large()
4514 set_inuse_and_pinuse(m, v, (rsize + nb)); in tmalloc_large()
4516 set_size_and_pinuse_of_inuse_chunk(m, v, nb); in tmalloc_large()
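
tmalloc_large() walks the size-segregated tree for the fit with the smallest remainder. The unsigned negation on line 4460 is deliberate: it starts the running best remainder at a value no real chunksize(t) - nb can reach, so the first chunk that fits always wins, and line 4507 then lets dv take precedence when it would leave a smaller remainder still. A flat-array sketch of that best-fit scan, including the wraparound trick:

    #include <stddef.h>
    #include <stdio.h>

    int main(void) {
      size_t nb = 256;
      size_t sizes[] = { 512, 288, 200, 272 };  /* assumed candidates; 200 < nb */
      size_t rsize = -nb;   /* unsigned negation: exceeds any true remainder */
      size_t best = 0;
      for (size_t i = 0; i < sizeof sizes / sizeof *sizes; i++) {
        size_t trem = sizes[i] - nb;  /* wraps to a huge value when sizes[i] < nb,
                                         so undersized chunks can never win */
        if (trem < rsize) {
          rsize = trem;               /* smaller remainder wins */
          best  = sizes[i];
        }
      }
      printf("best fit %zu leaves remainder %zu\n", best, rsize);
      return 0;
    }
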
4529 static void* tmalloc_small(mstate m, size_t nb) { in tmalloc_small() argument
4536 rsize = chunksize(t) - nb; in tmalloc_small()
4539 size_t trem = chunksize(t) - nb; in tmalloc_small()
4547 mchunkptr r = chunk_plus_offset(v, nb); in tmalloc_small()
4548 assert(chunksize(v) == rsize + nb); in tmalloc_small()
4552 set_inuse_and_pinuse(m, v, (rsize + nb)); in tmalloc_small()
4554 set_size_and_pinuse_of_inuse_chunk(m, v, nb); in tmalloc_small()
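
tmalloc_small() takes the smallest tree chunk of at least nb and then makes the choice both tree paths share: if the remainder rsize would fall below the minimum chunk size, the whole chunk is granted (line 4552); otherwise it is split at nb and the remainder re-binned (4554). A sketch of that policy, with the minimum size illustrative:

    #include <stddef.h>

    #define MIN_CHUNK_SIZE_SKETCH ((size_t)32)  /* illustrative 64-bit value */

    /* Size actually granted from a free chunk of chunksize bytes: the
     * whole chunk when the tail could not stand alone as a free chunk,
     * otherwise exactly nb. */
    static size_t granted_size(size_t chunksize, size_t nb) {
      size_t rsize = chunksize - nb;
      return rsize < MIN_CHUNK_SIZE_SKETCH ? chunksize : nb;
    }
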
4598 size_t nb; in dlmalloc() local
4602 nb = (bytes < MIN_REQUEST)? MIN_CHUNK_SIZE : pad_request(bytes); in dlmalloc()
4603 idx = small_index(nb); in dlmalloc()
4615 check_malloced_chunk(gm, mem, nb); in dlmalloc()
4619 else if (nb > gm->dvsize) { in dlmalloc()
4631 rsize = small_index2size(i) - nb; in dlmalloc()
4636 set_size_and_pinuse_of_inuse_chunk(gm, p, nb); in dlmalloc()
4637 r = chunk_plus_offset(p, nb); in dlmalloc()
4642 check_malloced_chunk(gm, mem, nb); in dlmalloc()
4646 else if (gm->treemap != 0 && (mem = tmalloc_small(gm, nb)) != 0) { in dlmalloc()
4647 check_malloced_chunk(gm, mem, nb); in dlmalloc()
4653 nb = MAX_SIZE_T; /* Too big to allocate. Force failure (in sys alloc) */ in dlmalloc()
4655 nb = pad_request(bytes); in dlmalloc()
4656 if (gm->treemap != 0 && (mem = tmalloc_large(gm, nb)) != 0) { in dlmalloc()
4657 check_malloced_chunk(gm, mem, nb); in dlmalloc()
4662 if (nb <= gm->dvsize) { in dlmalloc()
4663 size_t rsize = gm->dvsize - nb; in dlmalloc()
4666 mchunkptr r = gm->dv = chunk_plus_offset(p, nb); in dlmalloc()
4669 set_size_and_pinuse_of_inuse_chunk(gm, p, nb); in dlmalloc()
4678 check_malloced_chunk(gm, mem, nb); in dlmalloc()
4682 else if (nb < gm->topsize) { /* Split top */ in dlmalloc()
4683 size_t rsize = gm->topsize -= nb; in dlmalloc()
4685 mchunkptr r = gm->top = chunk_plus_offset(p, nb); in dlmalloc()
4687 set_size_and_pinuse_of_inuse_chunk(gm, p, nb); in dlmalloc()
4690 check_malloced_chunk(gm, mem, nb); in dlmalloc()
4694 mem = sys_alloc(gm, nb); in dlmalloc()
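
dlmalloc() normalizes the caller's byte count into nb before anything else: requests below MIN_REQUEST become MIN_CHUNK_SIZE, everything else is pad_request()ed, i.e. bumped by the per-chunk overhead and rounded up to the chunk alignment (lines 4602 and 4655), and oversized requests are forced to MAX_SIZE_T so sys_alloc() fails them (4653). A sketch of that normalization with typical 64-bit constants, not taken from any particular build:

    #include <stddef.h>
    #include <stdio.h>

    #define CHUNK_ALIGN_MASK ((size_t)15)      /* 16-byte chunk alignment */
    #define CHUNK_OVERHEAD   (sizeof(size_t))  /* one size_t of header */
    #define MIN_CHUNK_SIZE   ((size_t)32)
    #define MIN_REQUEST      (MIN_CHUNK_SIZE - CHUNK_OVERHEAD - 1)

    static size_t pad_request_sketch(size_t req) {
      return (req + CHUNK_OVERHEAD + CHUNK_ALIGN_MASK) & ~CHUNK_ALIGN_MASK;
    }

    int main(void) {
      for (size_t bytes = 0; bytes <= 64; bytes += 16) {
        size_t nb = bytes < MIN_REQUEST ? MIN_CHUNK_SIZE
                                        : pad_request_sketch(bytes);
        printf("bytes=%2zu -> nb=%zu\n", bytes, nb);
      }
      return 0;
    }
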
4840 static mchunkptr try_realloc_chunk(mstate m, mchunkptr p, size_t nb, in try_realloc_chunk() argument
4848 newp = mmap_resize(m, p, nb, can_move); in try_realloc_chunk()
4850 else if (oldsize >= nb) { /* already big enough */ in try_realloc_chunk()
4851 size_t rsize = oldsize - nb; in try_realloc_chunk()
4853 mchunkptr r = chunk_plus_offset(p, nb); in try_realloc_chunk()
4854 set_inuse(m, p, nb); in try_realloc_chunk()
4861 if (oldsize + m->topsize > nb) { in try_realloc_chunk()
4863 size_t newtopsize = newsize - nb; in try_realloc_chunk()
4864 mchunkptr newtop = chunk_plus_offset(p, nb); in try_realloc_chunk()
4865 set_inuse(m, p, nb); in try_realloc_chunk()
4874 if (oldsize + dvs >= nb) { in try_realloc_chunk()
4875 size_t dsize = oldsize + dvs - nb; in try_realloc_chunk()
4877 mchunkptr r = chunk_plus_offset(p, nb); in try_realloc_chunk()
4879 set_inuse(m, p, nb); in try_realloc_chunk()
4896 if (oldsize + nextsize >= nb) { in try_realloc_chunk()
4897 size_t rsize = oldsize + nextsize - nb; in try_realloc_chunk()
4904 mchunkptr r = chunk_plus_offset(p, nb); in try_realloc_chunk()
4905 set_inuse(m, p, nb); in try_realloc_chunk()
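
try_realloc_chunk() works down a ladder: mmap_resize() for mmapped chunks (line 4848), shrink in place when oldsize already covers nb (4850-4854), then grow by absorbing the adjacent top chunk (4861-4865), the designated victim dv (4874-4879), or a free next chunk (4896-4905); only when all of these fail does the caller fall back to allocate, copy, and free. A condensed sketch of the ladder for a non-mmapped chunk, with the minimum split size illustrative:

    #include <stddef.h>

    #define MIN_CHUNK ((size_t)32)  /* illustrative minimum split size */

    /* The chunk's new size if the realloc can happen in place, or 0
     * when the caller has to move the data instead.  nextfree models
     * whichever of top, dv, or the next free chunk adjoins p. */
    static size_t realloc_in_place_size(size_t oldsize, size_t nextfree,
                                        size_t nb) {
      if (oldsize >= nb) {                         /* shrink or keep */
        size_t rsize = oldsize - nb;
        return rsize >= MIN_CHUNK ? nb : oldsize;  /* split only a viable tail */
      }
      if (oldsize + nextfree >= nb)                /* absorb the neighbor */
        return nb;
      return 0;
    }
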
4934 size_t nb = request2size(bytes); in internal_memalign() local
4935 size_t req = nb + alignment + MIN_CHUNK_SIZE - CHUNK_OVERHEAD; in internal_memalign()
4974 if (size > nb + MIN_CHUNK_SIZE) { in internal_memalign()
4975 size_t remainder_size = size - nb; in internal_memalign()
4976 mchunkptr remainder = chunk_plus_offset(p, nb); in internal_memalign()
4977 set_inuse(m, p, nb); in internal_memalign()
4984 assert (chunksize(p) >= nb); in internal_memalign()
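
internal_memalign() over-allocates on purpose: line 4934 adds the alignment plus almost a whole minimal chunk of slack to nb, which guarantees an aligned chunk boundary exists inside the block; after the leading slack is split off (the real code also skips forward when the first aligned position would leave a sliver too small to split), any tail beyond nb + MIN_CHUNK_SIZE is trimmed at offset nb (4974-4977), and the assert on 4984 records the invariant. A sketch of the alignment arithmetic, with MIN_CHUNK_SIZE 32 and CHUNK_OVERHEAD 8 assumed:

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    int main(void) {
      size_t nb = 200, alignment = 64;           /* assumed inputs */
      size_t req = nb + alignment + 32 - 8;      /* line 4934's padding */
      uintptr_t base = 0x100c;                   /* pretend chunk address,
                                                    deliberately misaligned */
      uintptr_t aligned = (base + alignment - 1) & ~(uintptr_t)(alignment - 1);
      printf("request %zu bytes; first aligned boundary is %zu bytes in\n",
             req, (size_t)(aligned - base));
      return 0;
    }
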
5228 size_t nb = request2size(bytes); in dlrealloc() local
5240 mchunkptr newp = try_realloc_chunk(m, oldp, nb, 1); in dlrealloc()
5266 size_t nb = request2size(bytes); in dlrealloc_in_place() local
5278 mchunkptr newp = try_realloc_chunk(m, oldp, nb, 0); in dlrealloc_in_place()
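
The two realloc entry points differ only in try_realloc_chunk()'s last argument: dlrealloc() passes 1, so the chunk may be moved (for instance by a remapping mmap_resize()), while dlrealloc_in_place() passes 0 and returns NULL rather than relocate. A caller-side sketch, with the prototypes hand-declared as in dlmalloc's header and linking against dlmalloc assumed:

    #include <stddef.h>

    void *dlrealloc(void *mem, size_t bytes);          /* may move the block */
    void *dlrealloc_in_place(void *mem, size_t bytes); /* never moves it */

    static void *grow_preserving_pointers(void *p, size_t bytes) {
      /* NULL here means "could not resize in place"; p is still valid. */
      return dlrealloc_in_place(p, bytes);
    }
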
5546 size_t nb; in mspace_malloc() local
5550 nb = (bytes < MIN_REQUEST)? MIN_CHUNK_SIZE : pad_request(bytes); in mspace_malloc()
5551 idx = small_index(nb); in mspace_malloc()
5563 check_malloced_chunk(ms, mem, nb); in mspace_malloc()
5567 else if (nb > ms->dvsize) { in mspace_malloc()
5579 rsize = small_index2size(i) - nb; in mspace_malloc()
5584 set_size_and_pinuse_of_inuse_chunk(ms, p, nb); in mspace_malloc()
5585 r = chunk_plus_offset(p, nb); in mspace_malloc()
5590 check_malloced_chunk(ms, mem, nb); in mspace_malloc()
5594 else if (ms->treemap != 0 && (mem = tmalloc_small(ms, nb)) != 0) { in mspace_malloc()
5595 check_malloced_chunk(ms, mem, nb); in mspace_malloc()
5601 nb = MAX_SIZE_T; /* Too big to allocate. Force failure (in sys alloc) */ in mspace_malloc()
5603 nb = pad_request(bytes); in mspace_malloc()
5604 if (ms->treemap != 0 && (mem = tmalloc_large(ms, nb)) != 0) { in mspace_malloc()
5605 check_malloced_chunk(ms, mem, nb); in mspace_malloc()
5610 if (nb <= ms->dvsize) { in mspace_malloc()
5611 size_t rsize = ms->dvsize - nb; in mspace_malloc()
5614 mchunkptr r = ms->dv = chunk_plus_offset(p, nb); in mspace_malloc()
5617 set_size_and_pinuse_of_inuse_chunk(ms, p, nb); in mspace_malloc()
5626 check_malloced_chunk(ms, mem, nb); in mspace_malloc()
5630 else if (nb < ms->topsize) { /* Split top */ in mspace_malloc()
5631 size_t rsize = ms->topsize -= nb; in mspace_malloc()
5633 mchunkptr r = ms->top = chunk_plus_offset(p, nb); in mspace_malloc()
5635 set_size_and_pinuse_of_inuse_chunk(ms, p, nb); in mspace_malloc()
5638 check_malloced_chunk(ms, mem, nb); in mspace_malloc()
5642 mem = sys_alloc(ms, nb); in mspace_malloc()
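
mspace_malloc() (lines 5546-5642) repeats the dlmalloc() path line for line with the global state gm replaced by the explicit mstate ms, which is what lets each mspace behave as an independent heap. A minimal usage sketch, assuming dlmalloc is built with MSPACES and declaring the prototypes as in the distribution header:

    #include <stddef.h>

    typedef void *mspace;
    mspace create_mspace(size_t capacity, int locked);
    void  *mspace_malloc(mspace msp, size_t bytes);
    size_t destroy_mspace(mspace msp);

    int main(void) {
      mspace ms = create_mspace(0, 0);   /* capacity 0 = use a default */
      void  *p = mspace_malloc(ms, 100); /* same nb normalization as dlmalloc() */
      (void)p;
      destroy_mspace(ms);                /* releases every chunk in the space */
      return 0;
    }
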
5787 size_t nb = request2size(bytes); in mspace_realloc() local
5799 mchunkptr newp = try_realloc_chunk(m, oldp, nb, 1); in mspace_realloc()
5825 size_t nb = request2size(bytes); in mspace_realloc_in_place() local
5838 mchunkptr newp = try_realloc_chunk(m, oldp, nb, 0); in mspace_realloc_in_place()