Lines Matching refs:pool

74  * Creates a new pool
79 struct compute_memory_pool* pool = (struct compute_memory_pool*)
84 pool->screen = rscreen;
85 return pool;
88 static void compute_memory_pool_init(struct compute_memory_pool * pool,
95 /* XXX: pool->shadow is used when the buffer needs to be resized, but
97 * pool->shadow = (uint32_t*)CALLOC(4, pool->size_in_dw);
99 pool->next_id = 1;
100 pool->size_in_dw = initial_size_in_dw;
101 pool->bo = (struct r600_resource*)create_pool_texture(pool->screen,
102 pool->size_in_dw);
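
Taken together, these fragments suggest a two-phase construction: the pool struct is calloc'd and bound to its screen (fragment 84), and the backing texture is only created once compute_memory_pool_init picks a size. The following is a minimal stand-alone sketch of that shape, using hypothetical simplified types (mem_pool, mem_item); a plain host buffer stands in for the GPU resource and the screen is omitted entirely. It illustrates the pattern, not the driver's actual code.

#include <assert.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>

struct mem_pool;

struct mem_item {
    int64_t id;
    int64_t start_in_dw;        /* offset in dwords; -1 while pending */
    int64_t size_in_dw;
    struct mem_pool *pool;      /* back-pointer, as in fragment 400 */
    struct mem_item *prev, *next;
};

struct mem_pool {
    int64_t size_in_dw;         /* pool size in 32-bit dwords */
    int64_t next_id;            /* id handed to the next allocation */
    uint32_t *bo;               /* stand-in for the GPU resource */
    uint32_t *shadow;           /* host copy used while resizing */
    struct mem_item *item_list; /* doubly linked, sorted by offset */
};

/* Phase 1: allocate the zeroed pool struct (the driver also stores
 * the r600_screen pointer here). */
static struct mem_pool *pool_new(void)
{
    return (struct mem_pool *)calloc(1, sizeof(struct mem_pool));
}

/* Phase 2: pick a size and create the backing store; ids start at 1. */
static void pool_init(struct mem_pool *pool, int64_t initial_size_in_dw)
{
    pool->next_id = 1;
    pool->size_in_dw = initial_size_in_dw;
    pool->bo = (uint32_t *)calloc(4, pool->size_in_dw);
}

Note that the driver defers the init call until the pool is first grown (fragments 199-200), so a freshly created pool has no backing store at all.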
106  * Frees everything in the pool, including the pool struct itself
108 void compute_memory_pool_delete(struct compute_memory_pool* pool)
111 free(pool->shadow);
112 if (pool->bo) {
113 pool->screen->screen.resource_destroy((struct pipe_screen *)
114 pool->screen, (struct pipe_resource *)pool->bo);
116 free(pool);
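
The teardown fragments free the host shadow, destroy the backing resource through the screen's resource_destroy, then free the struct. In the simplified model above that collapses to plain free() calls; this sketch is an assumption about the shape, not the real teardown:

static void pool_delete(struct mem_pool *pool)
{
    free(pool->shadow);
    /* the driver destroys pool->bo via screen.resource_destroy */
    free(pool->bo);
    free(pool);
}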
120  * Searches for an empty space in the pool; returns the offset of the
121  * allocatable space in the pool, or -1 on failure.
124 struct compute_memory_pool* pool,
127 assert(size_in_dw <= pool->size_in_dw);
136 for (item = pool->item_list; item; item = item->next) {
147 if (pool->size_in_dw - last_end < size_in_dw) {
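
The search walks the item list, which is kept sorted by offset, remembering where the previous allocation ended; the first gap wide enough wins, with the tail gap covered by the size_in_dw - last_end check in fragment 147. A sketch over the simplified types from the first block, assuming pending items (start_in_dw == -1) are skipped because they occupy no space yet:

static int64_t prealloc_chunk(struct mem_pool *pool, int64_t size_in_dw)
{
    struct mem_item *item;
    int64_t last_end = 0;

    assert(size_in_dw <= pool->size_in_dw);

    for (item = pool->item_list; item; item = item->next) {
        if (item->start_in_dw == -1)
            continue;                        /* not placed yet */
        if (item->start_in_dw - last_end >= size_in_dw)
            return last_end;                 /* gap before this item */
        last_end = item->start_in_dw + item->size_in_dw;
    }

    if (pool->size_in_dw - last_end < size_in_dw)
        return -1;                           /* tail gap too small */
    return last_end;                         /* allocate at the tail */
}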
158 struct compute_memory_pool* pool,
166 for (item = pool->item_list; item; item = item->next) {
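
Its counterpart finds where a freshly placed chunk belongs in the sorted list: the item it should be linked after, or NULL when it becomes the new head. A sketch of that loop, again over the hypothetical types:

static struct mem_item *postalloc_chunk(struct mem_pool *pool,
                                        int64_t start_in_dw)
{
    struct mem_item *item;

    for (item = pool->item_list; item; item = item->next) {
        if (item->next) {
            if (item->start_in_dw < start_in_dw &&
                item->next->start_in_dw > start_in_dw)
                return item;    /* slot sits between item and item->next */
        } else {
            if (item->start_in_dw < start_in_dw)
                return item;    /* slot sits after the last item */
        }
    }
    return NULL;                /* new head of the list */
}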
185  * Reallocates the pool, preserving its contents
187 void compute_memory_grow_pool(struct compute_memory_pool* pool,
193 assert(new_size_in_dw >= pool->size_in_dw);
195 assert(!pool->bo && "Growing the global memory pool is not yet "
199 if (!pool->bo) {
200 compute_memory_pool_init(pool, 1024 * 16);
206 fprintf(stderr, "Warning: growing the global memory pool to"
212 compute_memory_shadow(pool, pipe, 1);
213 pool->shadow = (uint32_t*)realloc(pool->shadow, new_size_in_dw*4);
214 pool->size_in_dw = new_size_in_dw;
215 pool->screen->screen.resource_destroy(
216 (struct pipe_screen *)pool->screen,
217 (struct pipe_resource *)pool->bo);
218 pool->bo = (struct r600_resource*)create_pool_texture(
219 pool->screen,
220 pool->size_in_dw);
221 compute_memory_shadow(pool, pipe, 0);
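
The grow sequence implied by fragments 212-221: save the pool contents to the host shadow, enlarge the shadow, destroy and recreate the backing resource at the new size, then restore from the shadow. A sketch with the two compute_memory_shadow calls inlined as memcpys; note the realloc is hoisted above the save here so the sketch's shadow buffer is always large enough, whereas the fragments perform the save first. The 1024 * 16 default and the !pool->bo early path come from fragments 199-200; taking the larger of that default and the requested size is this sketch's assumption.

static void grow_pool(struct mem_pool *pool, int64_t new_size_in_dw)
{
    int64_t old_size_in_dw = pool->size_in_dw;

    assert(new_size_in_dw >= pool->size_in_dw);

    if (!pool->bo) {
        /* first use: nothing to preserve, just pick a size */
        pool_init(pool, new_size_in_dw > 1024 * 16 ?
                        new_size_in_dw : 1024 * 16);
        return;
    }

    /* save: device -> host (compute_memory_shadow(pool, pipe, 1)) */
    pool->shadow = (uint32_t *)realloc(pool->shadow, new_size_in_dw * 4);
    memcpy(pool->shadow, pool->bo, old_size_in_dw * 4);

    /* recreate the backing store at the new size
     * (resource_destroy + create_pool_texture in the driver) */
    pool->size_in_dw = new_size_in_dw;
    free(pool->bo);
    pool->bo = (uint32_t *)calloc(4, pool->size_in_dw);

    /* restore: host -> device (compute_memory_shadow(pool, pipe, 0)) */
    memcpy(pool->bo, pool->shadow, old_size_in_dw * 4);
}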
226  * Copies the pool from device to host, or from host to device.
228 void compute_memory_shadow(struct compute_memory_pool* pool,
238 chunk.size_in_dw = pool->size_in_dw;
240 compute_memory_transfer(pool, pipe, device_to_host, &chunk,
241 pool->shadow, 0, pool->size_in_dw*4);
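
Fragments 238-241 show the shadow pass building a chunk that covers the entire pool (offset 0, pool->size_in_dw dwords) and handing it to compute_memory_transfer in the requested direction. In the simplified model that is a single whole-pool copy:

static void shadow_copy(struct mem_pool *pool, int device_to_host)
{
    /* one chunk spanning the whole pool, as in the fragments */
    size_t bytes = (size_t)pool->size_in_dw * 4;

    if (device_to_host)
        memcpy(pool->shadow, pool->bo, bytes);   /* stand-in for DMA */
    else
        memcpy(pool->bo, pool->shadow, bytes);
}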
245  * Allocates all pending allocation requests in the pool
247 void compute_memory_finalize_pending(struct compute_memory_pool* pool,
258 for (item = pool->item_list; item; item = item->next) {
262 for (item = pool->item_list; item; item = next) {
278 pool->item_list = next;
296 if (pool->size_in_dw < allocated+unallocated) {
297 compute_memory_grow_pool(pool, pipe, allocated+unallocated);
305 while ((start_in_dw=compute_memory_prealloc_chunk(pool,
308 (pool->size_in_dw - allocated);
313 compute_memory_grow_pool(pool,
315 pool->size_in_dw + need);
318 need = pool->size_in_dw / 10;
320 compute_memory_grow_pool(pool,
322 pool->size_in_dw + need);
330 if (pool->item_list) {
333 pos = compute_memory_postalloc_chunk(pool, start_in_dw);
343 pool->item_list = item;
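
The finalize pass is where pending items (start_in_dw == -1) become real allocations. Reading fragments 258-343 together: it totals placed versus pending sizes, grows the pool up front if their sum cannot fit (fragments 296-297), then places each pending item with the first-fit search, growing again on each failure (by the exact shortfall when there is one, otherwise in steps of a tenth of the pool, fragment 318) and re-linking the item sorted by offset via postalloc_chunk. A sketch, with two small list helpers that are this model's own additions:

static void unlink_item(struct mem_pool *pool, struct mem_item *item)
{
    if (item->prev) item->prev->next = item->next;
    else            pool->item_list  = item->next;
    if (item->next) item->next->prev = item->prev;
    item->prev = item->next = NULL;
}

static void link_sorted(struct mem_pool *pool, struct mem_item *item)
{
    struct mem_item *pos = postalloc_chunk(pool, item->start_in_dw);

    item->prev = pos;
    if (pos) {
        item->next = pos->next;
        pos->next = item;
    } else {
        item->next = pool->item_list;
        pool->item_list = item;
    }
    if (item->next)
        item->next->prev = item;
}

static void finalize_pending(struct mem_pool *pool)
{
    struct mem_item *item, *next;
    int64_t allocated = 0, unallocated = 0, start;

    for (item = pool->item_list; item; item = item->next) {
        if (item->start_in_dw == -1) unallocated += item->size_in_dw;
        else                         allocated  += item->size_in_dw;
    }
    if (!unallocated)
        return;

    if (pool->size_in_dw < allocated + unallocated)
        grow_pool(pool, allocated + unallocated);

    for (item = pool->item_list; item; item = next) {
        next = item->next;
        if (item->start_in_dw != -1)
            continue;                       /* already placed */

        while ((start = prealloc_chunk(pool, item->size_in_dw)) == -1) {
            /* grow by the shortfall, or by a tenth of the pool when
             * only fragmentation is to blame */
            int64_t need = item->size_in_dw -
                           (pool->size_in_dw - allocated);
            if (need <= 0)
                need = pool->size_in_dw / 10;
            grow_pool(pool, pool->size_in_dw + need);
        }

        unlink_item(pool, item);
        item->start_in_dw = start;
        link_sorted(pool, item);
        allocated += item->size_in_dw;
    }
}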
351 void compute_memory_free(struct compute_memory_pool* pool, int64_t id)
357 for (item = pool->item_list; item; item = next) {
365 pool->item_list = item->next;
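
Freeing works by id rather than by pointer: the list is scanned and the matching item is unlinked and released. A sketch reusing the unlink_item helper above:

static void pool_free(struct mem_pool *pool, int64_t id)
{
    struct mem_item *item, *next;

    for (item = pool->item_list; item; item = next) {
        next = item->next;
        if (item->id == id) {
            unlink_item(pool, item);
            free(item);
            return;
        }
    }
}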
388 struct compute_memory_pool* pool,
399 new_item->id = pool->next_id++;
400 new_item->pool = pool;
404 if (pool->item_list) {
405 for (last_item = pool->item_list; last_item->next;
412 pool->item_list = new_item;
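
Allocation itself never touches the backing store: it creates an item marked pending, stamps it with next_id, points it back at its pool, and appends it at the list tail with a linear walk to the last item, exactly as fragments 399-412 suggest. A sketch:

static struct mem_item *pool_alloc(struct mem_pool *pool,
                                   int64_t size_in_dw)
{
    struct mem_item *new_item =
        (struct mem_item *)calloc(1, sizeof(struct mem_item));
    struct mem_item *last_item;

    new_item->size_in_dw = size_in_dw;
    new_item->start_in_dw = -1;          /* pending until finalized */
    new_item->id = pool->next_id++;
    new_item->pool = pool;

    if (pool->item_list) {
        for (last_item = pool->item_list; last_item->next;
             last_item = last_item->next)
            ;                            /* walk to the tail */
        last_item->next = new_item;
        new_item->prev = last_item;
    } else {
        pool->item_list = new_item;
    }
    return new_item;
}

Appending at the tail means pending items sit in arrival order, not offset order; the sorted invariant is restored only when finalize_pending re-links each item after placement.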
422 struct compute_memory_pool* pool,
430 int64_t aligned_size = pool->size_in_dw;
431 struct pipe_resource* gart = (struct pipe_resource*)pool->bo;
471 struct compute_memory_pool* pool,
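
The transfer fragments (430-431) show the pool's bo treated as a single GART resource of aligned_size dwords; a chunk transfer evidently maps it and copies a byte range in either direction. This stand-in sketch uses a memcpy over the flat buffer in place of the real map/unmap, and its parameter list is an assumption modelled on the shadow call site (fragments 240-241):

static void pool_transfer(struct mem_pool *pool, int device_to_host,
                          struct mem_item *chunk, void *data,
                          int offset_in_chunk, int size_in_bytes)
{
    /* the driver maps the whole resource and copies within the
     * mapping; a flat host buffer reduces that to pointer math */
    char *pool_ptr = (char *)pool->bo +
                     chunk->start_in_dw * 4 + offset_in_chunk;

    if (device_to_host)
        memcpy(data, pool_ptr, size_in_bytes);
    else
        memcpy(pool_ptr, data, size_in_bytes);
}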