Lines Matching refs:size

29 uint16_t size; member
58 uint32_t size; member
68 uint32_t size; member
99 (page->size < NXT_MEM_ZONE_PAGE_USED)
103 (page->size >= 16)
107 (slot->size < zone->small_bitmap_min_size)
118 #define nxt_mem_zone_fresh_junk(p, size) \ argument
119 nxt_memset((p), 0xA5, size)
122 #define nxt_mem_zone_free_junk(p, size) \ argument
123 nxt_memset((p), 0x5A, size)
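The two macros above (source lines 118-123) give the zone its debug fill patterns: freshly obtained memory is filled with 0xA5 and released memory with 0x5A, so reads of uninitialized or freed chunks show up as recognizable garbage. A standalone sketch of the same idea, with plain memset() standing in for nxt_memset():

#include <stdio.h>
#include <string.h>

/* Sketch of the junk-fill macros above; nxt_memset() is replaced by memset(). */
#define mem_zone_fresh_junk(p, size)  memset((p), 0xA5, (size))
#define mem_zone_free_junk(p, size)   memset((p), 0x5A, (size))

int
main(void)
{
    unsigned char  buf[16];

    mem_zone_fresh_junk(buf, sizeof(buf));          /* memory just handed out */
    printf("fresh fill: %02x\n", (unsigned) buf[0]);  /* a5 */

    mem_zone_free_junk(buf, sizeof(buf));           /* memory being freed */
    printf("free fill:  %02x\n", (unsigned) buf[0]);  /* 5a */

    return 0;
}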
135 nxt_mem_zone_slot_t *slot, size_t size);
137 nxt_uint_t size);
139 size_t size);
184 page[n].size = NXT_MEM_ZONE_PAGE_FRESH; in nxt_mem_zone_init()
192 zone->sentinel_page.size = NXT_MEM_ZONE_PAGE_USED; in nxt_mem_zone_init()
201 block->size = pages; in nxt_mem_zone_init()
214 nxt_uint_t n, pages, size, chunks, last; in nxt_mem_zone_pages() local
223 size = 32; in nxt_mem_zone_pages()
226 chunks = page_size / size; in nxt_mem_zone_pages()
233 size += 16; in nxt_mem_zone_pages()
288 nxt_uint_t n, size, chunks; in nxt_mem_zone_slots_init() local
294 slot[0].size = 16; in nxt_mem_zone_slots_init()
297 size = 32; in nxt_mem_zone_slots_init()
300 chunks = page_size / size; in nxt_mem_zone_slots_init()
308 slot[n].size, slot[n].chunks + 1, in nxt_mem_zone_slots_init()
319 slot[n].size = size; in nxt_mem_zone_slots_init()
320 size += 16; in nxt_mem_zone_slots_init()
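The slot table built above grows in fixed 16-byte steps: slot[0] serves 16-byte chunks, the next slot 32-byte chunks, then 48, and so on, with each slot's chunk count starting from how many chunks of that size fit in a page (the nxt_mem_zone_slot_init() lines that follow then carve out room for the per-page chunk bitmap). A small sketch of that progression; the 4096-byte page and the 256-byte upper bound are illustrative assumptions, not the zone's actual limits:

#include <stdio.h>
#include <stddef.h>

/* Print the slot-size progression used for small chunks: sizes step by 16
 * and the initial chunk count is page_size / size.  The page size and the
 * cutoff are assumptions made for the example. */
int
main(void)
{
    size_t  size, page_size;

    page_size = 4096;

    for (size = 16; size <= 256; size += 16) {
        printf("slot size %3zu: %3zu chunks per page\n",
               size, page_size / size);
    }

    return 0;
}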
337 slot->start = page_size - slot->chunks * slot->size; in nxt_mem_zone_slot_init()
350 if (slot->size < slot->map_size) { in nxt_mem_zone_slot_init()
352 slot->chunks -= slot->map_size / slot->size; in nxt_mem_zone_slot_init()
358 slot->start = slot->size; in nxt_mem_zone_slot_init()
389 uint32_t n, size, size1, size2; in nxt_mem_zone_rbtree_compare() local
395 size1 = block1->size; in nxt_mem_zone_rbtree_compare()
396 size2 = block2->size; in nxt_mem_zone_rbtree_compare()
415 size = nxt_next_highest_power_of_two(size1) >> 1; in nxt_mem_zone_rbtree_compare()
418 start1 = nxt_align_ptr(block1, size); in nxt_mem_zone_rbtree_compare()
419 end1 = nxt_trunc_ptr((u_char *) block1 + size1, size); in nxt_mem_zone_rbtree_compare()
421 start2 = nxt_align_ptr(block2, size); in nxt_mem_zone_rbtree_compare()
422 end2 = nxt_trunc_ptr((u_char *) block2 + size2, size); in nxt_mem_zone_rbtree_compare()
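In the rbtree comparison above, free blocks are not looked at by raw size alone: an alignment is derived from the first block's size (its next-highest power of two, halved), and each block's start is then rounded up and its end rounded down to that boundary. A sketch of the rounding arithmetic involved, written on plain integer addresses; these macro bodies are an assumption about what nxt_align_ptr() and nxt_trunc_ptr() compute, not the project's definitions:

#include <stdio.h>
#include <stdint.h>

/* Round an address up (align) or down (trunc) to a power-of-two boundary. */
#define align_up(v, a)    (((v) + ((uintptr_t) (a) - 1)) & ~((uintptr_t) (a) - 1))
#define trunc_down(v, a)  ((v) & ~((uintptr_t) (a) - 1))

int
main(void)
{
    uintptr_t  addr = 1000;

    printf("1000 aligned up to 128:    %lu\n",
           (unsigned long) align_up(addr, 128));              /* 1024 */
    printf("1200 truncated down to 128: %lu\n",
           (unsigned long) trunc_down((uintptr_t) 1200, 128)); /* 1152 */

    return 0;
}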
429 nxt_mem_zone_zalloc(nxt_mem_zone_t *zone, size_t size) in nxt_mem_zone_zalloc() argument
433 p = nxt_mem_zone_align(zone, 1, size); in nxt_mem_zone_zalloc()
436 nxt_memzero(p, size); in nxt_mem_zone_zalloc()
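nxt_mem_zone_zalloc() above is simply the aligned allocator called with an alignment of 1, followed by a zero fill when the allocation succeeds. A minimal sketch of that pattern, with malloc() and memset() standing in for the zone calls:

#include <stdlib.h>
#include <string.h>

/* Sketch of the zalloc wrapper above: allocate, zero on success, return.
 * malloc() stands in for nxt_mem_zone_align(zone, 1, size). */
void *
zone_zalloc_sketch(size_t size)
{
    void  *p;

    p = malloc(size);

    if (p != NULL) {
        memset(p, 0, size);        /* nxt_memzero() equivalent */
    }

    return p;
}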
444 nxt_mem_zone_align(nxt_mem_zone_t *zone, size_t alignment, size_t size) in nxt_mem_zone_align() argument
454 if (size <= zone->max_chunk_size && alignment <= zone->max_chunk_size) { in nxt_mem_zone_align()
465 size = nxt_next_highest_power_of_two(size); in nxt_mem_zone_align()
466 size = nxt_max(size, alignment); in nxt_mem_zone_align()
473 for (slot = zone->slots; slot->size < size; slot++) { /* void */ } in nxt_mem_zone_align()
476 alignment, size, slot->size); in nxt_mem_zone_align()
480 p = nxt_mem_zone_alloc_small(zone, slot, size); in nxt_mem_zone_align()
484 nxt_thread_log_debug("mem zone alloc: @%uz:%uz", alignment, size); in nxt_mem_zone_align()
488 p = nxt_mem_zone_alloc_large(zone, alignment, size); in nxt_mem_zone_align()
499 alignment, size); in nxt_mem_zone_align()
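The allocation path above splits on zone->max_chunk_size: requests that fit (in both size and alignment) are rounded up to the next power of two, bumped to at least the requested alignment, and matched against the first slot whose size is large enough; anything bigger falls through to the page-level large allocator. A sketch of that size selection; the rounding helper and the 256-byte cutoff are assumptions:

#include <stdio.h>
#include <stddef.h>

/* Sketch of the small/large decision in nxt_mem_zone_align(). */
static size_t
next_highest_power_of_two(size_t n)
{
    size_t  p;

    for (p = 1; p < n; p <<= 1) { /* void */ }

    return p;
}

static size_t
small_request_size(size_t alignment, size_t size, size_t max_chunk_size)
{
    if (size <= max_chunk_size && alignment <= max_chunk_size) {
        size = next_highest_power_of_two(size);
        return (size > alignment) ? size : alignment;   /* nxt_max() */
    }

    return 0;    /* 0: use the large, page-based allocator instead */
}

int
main(void)
{
    printf("%zu\n", small_request_size(1, 24, 256));    /* 32 */
    printf("%zu\n", small_request_size(64, 24, 256));   /* 64 */
    printf("%zu\n", small_request_size(1, 300, 256));   /* 0: large path */

    return 0;
}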
508 size_t size) in nxt_mem_zone_alloc_small() argument
528 p += nxt_mem_zone_alloc_chunk(map, slot->start, slot->size); in nxt_mem_zone_alloc_small()
552 page->size = slot->size; in nxt_mem_zone_alloc_small()
580 nxt_mem_zone_alloc_chunk(uint8_t *map, nxt_uint_t offset, nxt_uint_t size) in nxt_mem_zone_alloc_chunk() argument
606 offset += size; in nxt_mem_zone_alloc_chunk()
613 offset += size * 8; in nxt_mem_zone_alloc_chunk()
622 offset += size * 32; in nxt_mem_zone_alloc_chunk()
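nxt_mem_zone_alloc_chunk() above walks the per-page bitmap while keeping a running byte offset into the page: the offset advances by one chunk size per bit, by eight chunk sizes when a whole map byte can be skipped, and by thirty-two when a whole 32-bit word can be. A simplified byte-at-a-time sketch of that walk; the bit convention (set bit = free), the scan direction, and the sample map contents are assumptions:

#include <stdio.h>
#include <stdint.h>
#include <stddef.h>

/* Find a free chunk in a per-page bitmap and return its byte offset,
 * advancing the offset by one chunk per bit and eight chunks per map byte. */
static int
alloc_chunk(uint8_t *map, size_t map_bytes, uint32_t offset, uint32_t size)
{
    for (size_t i = 0; i < map_bytes; i++) {

        if (map[i] != 0) {
            for (uint8_t bit = 0x80; bit != 0; bit >>= 1) {

                if (map[i] & bit) {
                    map[i] &= ~bit;          /* mark the chunk as used */
                    return (int) offset;
                }

                offset += size;              /* next chunk in this byte */
            }

        } else {
            offset += size * 8;              /* skip a fully used byte */
        }
    }

    return -1;                               /* page is full */
}

int
main(void)
{
    uint8_t  map[2] = { 0x00, 0x20 };        /* only the 11th chunk is free */

    printf("chunk offset: %d\n", alloc_chunk(map, sizeof(map), 64, 16));  /* 224 */

    return 0;
}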
630 nxt_mem_zone_alloc_large(nxt_mem_zone_t *zone, size_t alignment, size_t size) in nxt_mem_zone_alloc_large() argument
635 pages = (size + zone->page_size_mask) >> zone->page_size_shift; in nxt_mem_zone_alloc_large()
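The page count above is the usual round-up division expressed with a mask and a shift: add page_size - 1 and shift right by log2(page_size). A tiny arithmetic check under an assumed 4096-byte page (shift 12, mask 4095):

#include <stdio.h>
#include <stddef.h>

/* Round a byte count up to whole pages the way the line above does:
 * (size + page_size_mask) >> page_size_shift. */
int
main(void)
{
    size_t  page_size_shift = 12;
    size_t  page_size_mask = (1u << page_size_shift) - 1;
    size_t  sizes[] = { 1, 4096, 4097, 10000 };

    for (size_t i = 0; i < sizeof(sizes) / sizeof(sizes[0]); i++) {
        printf("%5zu bytes -> %zu pages\n",
               sizes[i], (sizes[i] + page_size_mask) >> page_size_shift);
    }

    return 0;
}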
665 node_pages = block->size; in nxt_mem_zone_alloc_pages()
678 block->size = prev_pages; in nxt_mem_zone_alloc_pages()
690 next_block->size = next_pages; in nxt_mem_zone_alloc_pages()
702 if (page[n].size == NXT_MEM_ZONE_PAGE_FRESH) { in nxt_mem_zone_alloc_pages()
707 page[n].size = NXT_MEM_ZONE_PAGE_USED; in nxt_mem_zone_alloc_pages()
739 if (pages <= block->size) { in nxt_mem_zone_find_free_block()
749 if (pages == block->size) { in nxt_mem_zone_find_free_block()
757 end = nxt_pointer_to(block, block->size << zone->page_size_shift); in nxt_mem_zone_find_free_block()
826 uint32_t size, offset, chunk; in nxt_mem_zone_free_chunk() local
830 size = page->size; in nxt_mem_zone_free_chunk()
833 for (slot = zone->slots; slot->size < size; slot++) { /* void */ } in nxt_mem_zone_free_chunk()
838 chunk = offset / size; in nxt_mem_zone_free_chunk()
840 if (nxt_slow_path(offset != chunk * size)) { in nxt_mem_zone_free_chunk()
858 nxt_mem_zone_free_junk(p, page->size); in nxt_mem_zone_free_chunk()
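Freeing a small chunk, as shown above, recovers the chunk index by dividing the pointer's page offset by the chunk size; if multiplying back does not reproduce the offset, the pointer did not sit on a chunk boundary and the free is rejected. A minimal sketch of that validation; the offsets and the 48-byte chunk size are illustrative:

#include <stdio.h>
#include <stdint.h>

/* Return the chunk index for a page offset, or -1 if the offset is not an
 * exact multiple of the chunk size (a misaligned "wrong chunk" pointer). */
static int
chunk_index(uint32_t offset, uint32_t size)
{
    uint32_t  chunk = offset / size;

    if (offset != chunk * size) {
        return -1;
    }

    return (int) chunk;
}

int
main(void)
{
    printf("%d\n", chunk_index(96, 48));    /* chunk 2 */
    printf("%d\n", chunk_index(100, 48));   /* -1: not on a chunk boundary */

    return 0;
}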
906 page->size = NXT_MEM_ZONE_PAGE_FREE; in nxt_mem_zone_free_pages()
923 count += next_block->size; in nxt_mem_zone_free_pages()
936 count += prev_block->size; in nxt_mem_zone_free_pages()
939 prev_block->size = count; in nxt_mem_zone_free_pages()
946 block->size = count; in nxt_mem_zone_free_pages()