Lines Matching refs:nb
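
Throughout the dlmalloc source, nb is the normalized request size: the caller's byte count padded with chunk overhead, rounded up to the chunk alignment, and never smaller than the minimum chunk size (see request2size/pad_request at 3788, 4091, 4144 below). A minimal standalone sketch of that normalization, assuming a one-size_t header overhead (no FOOTERS) and the default two-pointer alignment; the real macros differ in detail:

    #include <stddef.h>

    /* Hedged approximation of request2size()/pad_request(); not dlmalloc's
       actual macros. ALIGN_MASK and MIN_CHUNK are assumptions based on the
       default MALLOC_ALIGNMENT and a four-word minimum chunk. */
    #define ALIGN_MASK (2 * sizeof(void *) - 1)
    #define MIN_CHUNK  ((4 * sizeof(size_t) + ALIGN_MASK) & ~ALIGN_MASK)

    static size_t normalize_request(size_t bytes) {
      size_t nb = (bytes + sizeof(size_t) + ALIGN_MASK) & ~ALIGN_MASK;
      return (nb < MIN_CHUNK) ? MIN_CHUNK : nb;  /* this value is what the code calls nb */
    }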

3153 static void* mmap_alloc(mstate m, size_t nb) {
3154 size_t mmsize = granularity_align(nb + SIX_SIZE_T_SIZES + CHUNK_ALIGN_MASK);
3155 if (mmsize > nb) { /* Check for wrap around 0 */
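
mmap_alloc (3153) rounds nb plus per-mapping bookkeeping up to the mmap granularity and treats a result that is not larger than nb as overflow (3154-3155). A self-contained sketch of that guard, with illustrative overhead and granularity values rather than dlmalloc's:

    #include <stddef.h>

    /* gran must be a power of two; mirrors granularity_align in spirit. */
    static size_t round_up(size_t n, size_t gran) {
      return (n + gran - 1) & ~(gran - 1);
    }

    /* Returns the mapping size, or 0 if the padded size wrapped around zero. */
    static size_t mmap_request_size(size_t nb) {
      const size_t overhead = 6 * sizeof(size_t);        /* stands in for SIX_SIZE_T_SIZES */
      const size_t align_slack = 2 * sizeof(void *) - 1; /* stands in for CHUNK_ALIGN_MASK */
      size_t mmsize = round_up(nb + overhead + align_slack, 64 * 1024 /* assumed granularity */);
      return (mmsize > nb) ? mmsize : 0;
    }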
3180 static mchunkptr mmap_resize(mstate m, mchunkptr oldp, size_t nb) {
3182 if (is_small(nb)) /* Can't shrink mmap regions below small size */
3185 if (oldsize >= nb + SIZE_T_SIZE &&
3186 (oldsize - nb) <= (mparams.granularity << 1))
3191 size_t newmmsize = granularity_align(nb + SIX_SIZE_T_SIZES +
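
mmap_resize (3180) never shrinks an mmapped chunk into the small-size range (3182), and it keeps the existing mapping when the old size already covers nb with at most two granularity units of slack (3185-3186). The decision, restated with assumed names:

    #include <stddef.h>

    /* Keep the current mapping if it fits nb plus one size_t of tail overhead
       and wastes no more than two granularity units; otherwise remap. */
    static int can_reuse_mapping(size_t oldsize, size_t nb, size_t granularity) {
      return oldsize >= nb + sizeof(size_t) &&
             (oldsize - nb) <= (granularity << 1);
    }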
3262 size_t nb) {
3266 mchunkptr q = chunk_plus_offset(p, nb);
3267 size_t qsize = psize - nb;
3268 set_size_and_pinuse_of_inuse_chunk(m, p, nb);
3298 check_malloced_chunk(m, chunk2mem(p), nb);
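
prepend_alloc (3262) carves an in-use chunk of exactly nb bytes out of newly obtained space and turns the remaining psize - nb bytes at p + nb into a free chunk (3266-3268). The split arithmetic, with plain pointers standing in for mchunkptr:

    #include <stddef.h>

    struct split_result {
      void  *inuse;           /* chunk of nb bytes handed to the caller */
      void  *remainder;       /* q = chunk_plus_offset(p, nb)           */
      size_t remainder_size;  /* qsize = psize - nb                     */
    };

    static struct split_result split_at(void *p, size_t psize, size_t nb) {
      struct split_result s = { p, (char *)p + nb, psize - nb };
      return s;
    }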
3359 static void* sys_alloc(mstate m, size_t nb) {
3367 if (use_mmap(m) && nb >= mparams.mmap_threshold) {
3368 void* mem = mmap_alloc(m, nb);
3399 asize = granularity_align(nb + TOP_FOOT_SIZE + SIZE_T_ONE);
3413 asize = granularity_align(nb - m->topsize + TOP_FOOT_SIZE + SIZE_T_ONE);
3425 asize < nb + TOP_FOOT_SIZE + SIZE_T_ONE) {
3426 size_t esize = granularity_align(nb + TOP_FOOT_SIZE + SIZE_T_ONE - asize);
3450 size_t req = nb + TOP_FOOT_SIZE + SIZE_T_ONE;
3452 if (rsize > nb) { /* Fail if wraps around zero */
3463 size_t asize = granularity_align(nb + TOP_FOOT_SIZE + SIZE_T_ONE);
3473 if (ssize > nb + TOP_FOOT_SIZE) {
3527 return prepend_alloc(m, tbase, oldbase, nb);
3534 if (nb < m->topsize) { /* Allocate from new or extended top space */
3535 size_t rsize = m->topsize -= nb;
3537 mchunkptr r = m->top = chunk_plus_offset(p, nb);
3539 set_size_and_pinuse_of_inuse_chunk(m, p, nb);
3541 check_malloced_chunk(m, chunk2mem(p), nb);
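
sys_alloc (3359) is the fallback when no existing chunk can hold nb: very large requests go straight to mmap_alloc (3367-3368), otherwise the system is asked for at least nb + TOP_FOOT_SIZE + SIZE_T_ONE rounded to the granularity, and the new space either prepends an allocation to an existing segment (3527) or extends top, which is then split (3534-3541). A sketch of that final top split, with a plain struct standing in for mstate:

    #include <stddef.h>

    struct arena { void *top; size_t topsize; };

    /* Take nb bytes from the bottom of top; the rest becomes the new top.
       Mirrors lines 3534-3541 (and the "split top" path in dlmalloc itself). */
    static void *alloc_from_top(struct arena *m, size_t nb) {
      if (nb >= m->topsize)
        return NULL;                  /* the real code checks nb < topsize first */
      void *p = m->top;
      m->topsize -= nb;               /* rsize = topsize - nb                    */
      m->top = (char *)m->top + nb;   /* r = chunk_plus_offset(p, nb)            */
      return p;
    }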
3659 static void* tmalloc_large(mstate m, size_t nb) {
3661 size_t rsize = -nb; /* Unsigned negation */
3664 compute_tree_index(nb, idx);
3667 /* Traverse tree for this bin looking for node with size == nb */
3668 size_t sizebits = nb << leftshift_for_tree_index(idx);
3672 size_t trem = chunksize(t) - nb;
3683 t = rst; /* set t to least subtree holding sizes > nb */
3701 size_t trem = chunksize(t) - nb;
3710 if (v != 0 && rsize < (size_t)(m->dvsize - nb)) {
3712 mchunkptr r = chunk_plus_offset(v, nb);
3713 assert(chunksize(v) == rsize + nb);
3717 set_inuse_and_pinuse(m, v, (rsize + nb));
3719 set_size_and_pinuse_of_inuse_chunk(m, v, nb);
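
tmalloc_large (3659) does a best-fit search of the tree bin for nb: rsize starts as the unsigned negation of nb (3661), which acts as a sentinel larger than any achievable remainder, and each candidate with a smaller chunksize(t) - nb replaces it (3672, 3701); the winner is split at nb unless the remainder is too small to stand alone (3717-3719). The sentinel trick, checked in isolation:

    #include <assert.h>
    #include <stddef.h>
    #include <stdint.h>

    int main(void) {
      size_t nb = 1024;
      size_t rsize = (size_t)0 - nb;      /* same value as "rsize = -nb" on a size_t */
      assert(rsize == SIZE_MAX - nb + 1); /* i.e. larger than any chunksize(t) - nb  */
      size_t trem = 4096 - nb;            /* remainder for a hypothetical 4096 chunk */
      assert(trem < rsize);               /* so the first real candidate always wins */
      return 0;
    }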
3732 static void* tmalloc_small(mstate m, size_t nb) {
3740 rsize = chunksize(t) - nb;
3743 size_t trem = chunksize(t) - nb;
3751 mchunkptr r = chunk_plus_offset(v, nb);
3752 assert(chunksize(v) == rsize + nb);
3756 set_inuse_and_pinuse(m, v, (rsize + nb));
3758 set_size_and_pinuse_of_inuse_chunk(m, v, nb);
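
tmalloc_small (3732) serves a small nb that no small bin or dv could: it takes the smallest chunk recorded in the tree map and, like tmalloc_large, either splits it at nb or hands over the whole chunk when the remainder would fall below the minimum chunk size (3751-3758). That split-or-exhaust rule, restated with an assumed minimum:

    #include <stddef.h>

    #define MIN_CHUNK (4 * sizeof(size_t))   /* assumption; stands in for MIN_CHUNK_SIZE */

    /* Returns the remainder size if splitting at nb is worthwhile, else 0,
       meaning the whole chunk should be marked in use (set_inuse_and_pinuse). */
    static size_t usable_remainder(size_t chunk_size, size_t nb) {
      size_t rsize = chunk_size - nb;
      return (rsize < MIN_CHUNK) ? 0 : rsize;
    }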
3788 size_t nb = request2size(bytes);
3790 newp = mmap_resize(m, oldp, nb);
3791 else if (oldsize >= nb) { /* already big enough */
3792 size_t rsize = oldsize - nb;
3795 mchunkptr remainder = chunk_plus_offset(newp, nb);
3796 set_inuse(m, newp, nb);
3801 else if (next == m->top && oldsize + m->topsize > nb) {
3804 size_t newtopsize = newsize - nb;
3805 mchunkptr newtop = chunk_plus_offset(oldp, nb);
3806 set_inuse(m, oldp, nb);
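
The realloc path at 3788-3806 first recomputes nb from the new byte count, then tries to avoid copying: mmapped chunks go to mmap_resize (3790), a chunk already at least nb bytes is split and the tail released (3791-3796), and a chunk bordering top grows in place by absorbing top space when oldsize + topsize exceeds nb (3801-3806). The grow-into-top case, with plain pointers standing in for mchunkptr:

    #include <stddef.h>

    struct arena { void *top; size_t topsize; };

    /* Returns 1 if oldp could be extended in place to nb bytes by moving top
       up; returns 0 when the chunk after oldp is not top or space is short. */
    static int grow_into_top(struct arena *m, void *oldp, size_t oldsize,
                             void *next, size_t nb) {
      if (next != m->top || oldsize + m->topsize <= nb)
        return 0;
      m->topsize = oldsize + m->topsize - nb;  /* newtopsize = newsize - nb            */
      m->top = (char *)oldp + nb;              /* newtop = chunk_plus_offset(oldp, nb) */
      return 1;
    }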
3860 size_t nb = request2size(bytes);
3861 size_t req = nb + alignment + MIN_CHUNK_SIZE - CHUNK_OVERHEAD;
3903 if (size > nb + MIN_CHUNK_SIZE) {
3904 size_t remainder_size = size - nb;
3905 mchunkptr remainder = chunk_plus_offset(p, nb);
3906 set_inuse(m, p, nb);
3912 assert (chunksize(p) >= nb);
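
The memalign path at 3860-3912 over-allocates by nb + alignment + MIN_CHUNK_SIZE - CHUNK_OVERHEAD (3861), locates an aligned chunk inside the result, and trims the tail only when more than MIN_CHUNK_SIZE bytes remain past nb (3903-3906), leaving the returned chunk at least nb bytes long (3912). The tail-trim rule, with an assumed minimum chunk size:

    #include <stddef.h>

    #define MIN_CHUNK (4 * sizeof(size_t))   /* assumption; stands in for MIN_CHUNK_SIZE */

    /* If the aligned chunk of `size` bytes has more than MIN_CHUNK of slack
       past nb, return the remainder to split off at p + nb; else return 0
       and keep the slack inside the chunk handed to the caller. */
    static size_t tail_to_trim(size_t size, size_t nb) {
      return (size > nb + MIN_CHUNK) ? (size - nb) : 0;
    }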
4087 size_t nb;
4091 nb = (bytes < MIN_REQUEST)? MIN_CHUNK_SIZE : pad_request(bytes);
4092 idx = small_index(nb);
4104 check_malloced_chunk(gm, mem, nb);
4108 else if (nb > gm->dvsize) {
4120 rsize = small_index2size(i) - nb;
4125 set_size_and_pinuse_of_inuse_chunk(gm, p, nb);
4126 r = chunk_plus_offset(p, nb);
4131 check_malloced_chunk(gm, mem, nb);
4135 else if (gm->treemap != 0 && (mem = tmalloc_small(gm, nb)) != 0) {
4136 check_malloced_chunk(gm, mem, nb);
4142 nb = MAX_SIZE_T; /* Too big to allocate. Force failure (in sys alloc) */
4144 nb = pad_request(bytes);
4145 if (gm->treemap != 0 && (mem = tmalloc_large(gm, nb)) != 0) {
4146 check_malloced_chunk(gm, mem, nb);
4151 if (nb <= gm->dvsize) {
4152 size_t rsize = gm->dvsize - nb;
4155 mchunkptr r = gm->dv = chunk_plus_offset(p, nb);
4158 set_size_and_pinuse_of_inuse_chunk(gm, p, nb);
4167 check_malloced_chunk(gm, mem, nb);
4171 else if (nb < gm->topsize) { /* Split top */
4172 size_t rsize = gm->topsize -= nb;
4174 mchunkptr r = gm->top = chunk_plus_offset(p, nb);
4176 set_size_and_pinuse_of_inuse_chunk(gm, p, nb);
4179 check_malloced_chunk(gm, mem, nb);
4183 mem = sys_alloc(gm, nb);
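
dlmalloc itself (4087-4183) computes nb once and then tries, in order: an exact or next-larger small bin, the designated victim dv, tmalloc_small, splitting dv, splitting top, and finally sys_alloc; requests too large to pad safely force nb to MAX_SIZE_T so that sys_alloc fails cleanly (4142). A sketch of that early classification, with assumed constants (the real MAX_REQUEST and shift are configuration dependent):

    #include <stddef.h>
    #include <stdint.h>

    #define SMALLBIN_SHIFT 3                  /* assumption: default 8-byte bin spacing */
    #define MAX_REQUEST_APPROX (SIZE_MAX / 4) /* assumption; the real bound differs     */

    /* idx = small_index(nb) for small requests. */
    static unsigned small_bin_index(size_t nb) {
      return (unsigned)(nb >> SMALLBIN_SHIFT);
    }

    /* For oversized requests, nb becomes MAX_SIZE_T so sys_alloc must fail. */
    static size_t guard_oversized(size_t bytes, size_t nb) {
      return (bytes >= MAX_REQUEST_APPROX) ? SIZE_MAX : nb;
    }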
4494 size_t nb;
4498 nb = (bytes < MIN_REQUEST)? MIN_CHUNK_SIZE : pad_request(bytes);
4499 idx = small_index(nb);
4511 check_malloced_chunk(ms, mem, nb);
4515 else if (nb > ms->dvsize) {
4527 rsize = small_index2size(i) - nb;
4532 set_size_and_pinuse_of_inuse_chunk(ms, p, nb);
4533 r = chunk_plus_offset(p, nb);
4538 check_malloced_chunk(ms, mem, nb);
4542 else if (ms->treemap != 0 && (mem = tmalloc_small(ms, nb)) != 0) {
4543 check_malloced_chunk(ms, mem, nb);
4549 nb = MAX_SIZE_T; /* Too big to allocate. Force failure (in sys alloc) */
4551 nb = pad_request(bytes);
4552 if (ms->treemap != 0 && (mem = tmalloc_large(ms, nb)) != 0) {
4553 check_malloced_chunk(ms, mem, nb);
4558 if (nb <= ms->dvsize) {
4559 size_t rsize = ms->dvsize - nb;
4562 mchunkptr r = ms->dv = chunk_plus_offset(p, nb);
4565 set_size_and_pinuse_of_inuse_chunk(ms, p, nb);
4574 check_malloced_chunk(ms, mem, nb);
4578 else if (nb < ms->topsize) { /* Split top */
4579 size_t rsize = ms->topsize -= nb;
4581 mchunkptr r = ms->top = chunk_plus_offset(p, nb);
4583 set_size_and_pinuse_of_inuse_chunk(ms, p, nb);
4586 check_malloced_chunk(ms, mem, nb);
4590 mem = sys_alloc(ms, nb);
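
mspace_malloc (4494-4590) is the same dispatch as dlmalloc, only parameterized by an mstate ms derived from a caller-created mspace instead of the global state gm. A usage sketch, assuming the allocator was built with MSPACES defined; the prototypes normally come from the accompanying header rather than being declared by hand as they are here:

    #include <stddef.h>
    #include <stdio.h>

    typedef void *mspace;
    extern mspace create_mspace(size_t capacity, int locked);
    extern void  *mspace_malloc(mspace msp, size_t bytes);
    extern void   mspace_free(mspace msp, void *mem);
    extern size_t destroy_mspace(mspace msp);

    int main(void) {
      mspace msp = create_mspace(0, 0);     /* 0: default initial capacity, no locking   */
      if (msp == NULL)
        return 1;
      void *mem = mspace_malloc(msp, 1000); /* takes the same nb-driven path shown above */
      printf("%p\n", mem);
      mspace_free(msp, mem);
      destroy_mspace(msp);                  /* returns the space and all its memory      */
      return 0;
    }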