/*
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */
#include <assert.h>
#include <stdbool.h>
#include <string.h>
#include <unistd.h>
#include <fcntl.h>

#include "util/mesa-sha1.h"
#include "radv_private.h"
#include "sid.h"

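/* Build the CPU-side description of a set layout: per-binding offsets and
 * sizes within the GPU-visible set memory, plus running buffer and dynamic
 * offset counts.  Immutable sampler pointers live in the same allocation,
 * immediately after the binding array.
 */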
VkResult radv_CreateDescriptorSetLayout(
	VkDevice                                    _device,
	const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
	const VkAllocationCallbacks*                pAllocator,
	VkDescriptorSetLayout*                      pSetLayout)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	struct radv_descriptor_set_layout *set_layout;

	assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);

	uint32_t max_binding = 0;
	uint32_t immutable_sampler_count = 0;
	for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
		max_binding = MAX2(max_binding, pCreateInfo->pBindings[j].binding);
		if (pCreateInfo->pBindings[j].pImmutableSamplers)
			immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
	}

	size_t size = sizeof(struct radv_descriptor_set_layout) +
		(max_binding + 1) * sizeof(set_layout->binding[0]) +
		immutable_sampler_count * sizeof(struct radv_sampler *);

	set_layout = vk_alloc2(&device->alloc, pAllocator, size, 8,
				 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
	if (!set_layout)
		return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

	/* We just allocate all the samplers at the end of the struct */
	struct radv_sampler **samplers =
		(struct radv_sampler **)&set_layout->binding[max_binding + 1];

	set_layout->binding_count = max_binding + 1;
	set_layout->shader_stages = 0;
	set_layout->dynamic_shader_stages = 0;
	set_layout->size = 0;

	memset(set_layout->binding, 0, size - sizeof(struct radv_descriptor_set_layout));

	uint32_t buffer_count = 0;
	uint32_t dynamic_offset_count = 0;

	for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
		const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[j];
		uint32_t b = binding->binding;
		uint32_t alignment;

		switch (binding->descriptorType) {
		case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
		case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
			set_layout->binding[b].dynamic_offset_count = 1;
			set_layout->dynamic_shader_stages |= binding->stageFlags;
			set_layout->binding[b].size = 0;
			set_layout->binding[b].buffer_count = 1;
			alignment = 1;
			break;
		case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
		case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
		case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
		case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
			set_layout->binding[b].size = 16;
			set_layout->binding[b].buffer_count = 1;
			alignment = 16;
			break;
		case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
		case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
		case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
			/* main descriptor + fmask descriptor */
			set_layout->binding[b].size = 64;
			set_layout->binding[b].buffer_count = 1;
			alignment = 32;
			break;
		case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
			/* main descriptor + fmask descriptor + sampler */
			set_layout->binding[b].size = 96;
			set_layout->binding[b].buffer_count = 1;
			alignment = 32;
			break;
		case VK_DESCRIPTOR_TYPE_SAMPLER:
			set_layout->binding[b].size = 16;
			alignment = 16;
			break;
		default:
			unreachable("unknown descriptor type\n");
			break;
		}

		set_layout->size = align(set_layout->size, alignment);
		assert(binding->descriptorCount > 0);
		set_layout->binding[b].type = binding->descriptorType;
		set_layout->binding[b].array_size = binding->descriptorCount;
		set_layout->binding[b].offset = set_layout->size;
		set_layout->binding[b].buffer_offset = buffer_count;
		set_layout->binding[b].dynamic_offset_offset = dynamic_offset_count;

		set_layout->size += binding->descriptorCount * set_layout->binding[b].size;
		buffer_count += binding->descriptorCount * set_layout->binding[b].buffer_count;
		dynamic_offset_count += binding->descriptorCount *
			set_layout->binding[b].dynamic_offset_count;

		if (binding->pImmutableSamplers) {
			set_layout->binding[b].immutable_samplers = samplers;
			samplers += binding->descriptorCount;

			for (uint32_t i = 0; i < binding->descriptorCount; i++)
				set_layout->binding[b].immutable_samplers[i] =
					radv_sampler_from_handle(binding->pImmutableSamplers[i]);
		} else {
			set_layout->binding[b].immutable_samplers = NULL;
		}

		set_layout->shader_stages |= binding->stageFlags;
	}

	set_layout->buffer_count = buffer_count;
	set_layout->dynamic_offset_count = dynamic_offset_count;

	*pSetLayout = radv_descriptor_set_layout_to_handle(set_layout);

	return VK_SUCCESS;
}

void radv_DestroyDescriptorSetLayout(
	VkDevice                                    _device,
	VkDescriptorSetLayout                       _set_layout,
	const VkAllocationCallbacks*                pAllocator)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_descriptor_set_layout, set_layout, _set_layout);

	if (!set_layout)
		return;

	vk_free2(&device->alloc, pAllocator, set_layout);
}

/*
 * Pipeline layouts.  These have nothing to do with the pipeline.  They are
 * just multiple descriptor set layouts pasted together.
 */

VkResult radv_CreatePipelineLayout(
	VkDevice                                    _device,
	const VkPipelineLayoutCreateInfo*           pCreateInfo,
	const VkAllocationCallbacks*                pAllocator,
	VkPipelineLayout*                           pPipelineLayout)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	struct radv_pipeline_layout *layout;
	struct mesa_sha1 *ctx;

	assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);

	layout = vk_alloc2(&device->alloc, pAllocator, sizeof(*layout), 8,
			     VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
	if (layout == NULL)
		return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

	layout->num_sets = pCreateInfo->setLayoutCount;

	unsigned dynamic_offset_count = 0;

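	/* Hash the per-set binding layouts and the push constant size so the
	 * pipeline layout gets a stable identity (used elsewhere in the
	 * driver, e.g. for pipeline caching).
	 */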
	ctx = _mesa_sha1_init();
	for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
		RADV_FROM_HANDLE(radv_descriptor_set_layout, set_layout,
				 pCreateInfo->pSetLayouts[set]);
		layout->set[set].layout = set_layout;

		layout->set[set].dynamic_offset_start = dynamic_offset_count;
		for (uint32_t b = 0; b < set_layout->binding_count; b++) {
			dynamic_offset_count += set_layout->binding[b].array_size * set_layout->binding[b].dynamic_offset_count;
		}
		_mesa_sha1_update(ctx, set_layout->binding,
				  sizeof(set_layout->binding[0]) * set_layout->binding_count);
	}

	layout->dynamic_offset_count = dynamic_offset_count;
	layout->push_constant_size = 0;
	for (unsigned i = 0; i < pCreateInfo->pushConstantRangeCount; ++i) {
		const VkPushConstantRange *range = pCreateInfo->pPushConstantRanges + i;
		layout->push_constant_size = MAX2(layout->push_constant_size,
						  range->offset + range->size);
	}

	layout->push_constant_size = align(layout->push_constant_size, 16);
	_mesa_sha1_update(ctx, &layout->push_constant_size,
			  sizeof(layout->push_constant_size));
	_mesa_sha1_final(ctx, layout->sha1);
	*pPipelineLayout = radv_pipeline_layout_to_handle(layout);

	return VK_SUCCESS;
}

void radv_DestroyPipelineLayout(
	VkDevice                                    _device,
	VkPipelineLayout                            _pipelineLayout,
	const VkAllocationCallbacks*                pAllocator)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_pipeline_layout, pipeline_layout, _pipelineLayout);

	if (!pipeline_layout)
		return;
	vk_free2(&device->alloc, pAllocator, pipeline_layout);
}

#define EMPTY 1

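/* Allocate a descriptor set either from a pool or from a command buffer's
 * upload area.  The pool path first tries a linear bump allocation from
 * current_offset, then falls back to scanning the pool's free list; any
 * immutable samplers are copied into the new set's memory right away.
 */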
static VkResult
radv_descriptor_set_create(struct radv_device *device,
			   struct radv_descriptor_pool *pool,
			   struct radv_cmd_buffer *cmd_buffer,
			   const struct radv_descriptor_set_layout *layout,
			   struct radv_descriptor_set **out_set)
{
	struct radv_descriptor_set *set;
	unsigned mem_size = sizeof(struct radv_descriptor_set) +
		sizeof(struct radeon_winsys_bo *) * layout->buffer_count;
	set = vk_alloc2(&device->alloc, NULL, mem_size, 8,
			  VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);

	if (!set)
		return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

	memset(set, 0, mem_size);

	if (layout->dynamic_offset_count) {
		unsigned size = sizeof(struct radv_descriptor_range) *
		                layout->dynamic_offset_count;
		set->dynamic_descriptors = vk_alloc2(&device->alloc, NULL, size, 8,
			                               VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);

		if (!set->dynamic_descriptors) {
			vk_free2(&device->alloc, NULL, set);
			return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
		}
	}

	set->layout = layout;
	if (layout->size) {
		uint32_t layout_size = align_u32(layout->size, 32);
		set->size = layout->size;
		if (!cmd_buffer) {
			if (pool->current_offset + layout_size <= pool->size &&
			    pool->allocated_sets < pool->max_sets) {
				set->bo = pool->bo;
				set->mapped_ptr = (uint32_t*)(pool->mapped_ptr + pool->current_offset);
				set->va = device->ws->buffer_get_va(set->bo) + pool->current_offset;
				pool->current_offset += layout_size;
				++pool->allocated_sets;
			} else {
				int entry = pool->free_list, prev_entry = -1;
				uint32_t offset;
				while (entry >= 0) {
					if (pool->free_nodes[entry].size >= layout_size) {
						if (prev_entry >= 0)
							pool->free_nodes[prev_entry].next = pool->free_nodes[entry].next;
						else
							pool->free_list = pool->free_nodes[entry].next;
						break;
					}
					prev_entry = entry;
					entry = pool->free_nodes[entry].next;
				}

				if (entry < 0) {
					vk_free2(&device->alloc, NULL, set->dynamic_descriptors);
					vk_free2(&device->alloc, NULL, set);
					return vk_error(VK_ERROR_OUT_OF_DEVICE_MEMORY);
				}
				offset = pool->free_nodes[entry].offset;
				pool->free_nodes[entry].next = pool->full_list;
				pool->full_list = entry;

				set->bo = pool->bo;
				set->mapped_ptr = (uint32_t*)(pool->mapped_ptr + offset);
				set->va = device->ws->buffer_get_va(set->bo) + offset;
			}
		} else {
			unsigned bo_offset;
			if (!radv_cmd_buffer_upload_alloc(cmd_buffer, set->size, 32,
							  &bo_offset,
							  (void**)&set->mapped_ptr)) {
				vk_free2(&device->alloc, NULL, set->dynamic_descriptors);
				vk_free2(&device->alloc, NULL, set);
				return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
			}

			set->va = device->ws->buffer_get_va(cmd_buffer->upload.upload_bo);
			set->va += bo_offset;
		}
	}

	if (pool)
		list_add(&set->descriptor_pool, &pool->descriptor_sets);
	else
		list_inithead(&set->descriptor_pool);

	for (unsigned i = 0; i < layout->binding_count; ++i) {
		if (!layout->binding[i].immutable_samplers)
			continue;

		unsigned offset = layout->binding[i].offset / 4;
		if (layout->binding[i].type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
			offset += 16;

		for (unsigned j = 0; j < layout->binding[i].array_size; ++j) {
			struct radv_sampler *sampler = layout->binding[i].immutable_samplers[j];

			memcpy(set->mapped_ptr + offset, &sampler->state, 16);
			offset += layout->binding[i].size / 4;
		}
	}

	*out_set = set;
	return VK_SUCCESS;
}

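/* Tear down a set.  When free_bo is set, its backing range is returned to
 * the pool: full_list holds spare free-node records, free_list the ranges
 * currently available for reuse.
 */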
static void
radv_descriptor_set_destroy(struct radv_device *device,
			    struct radv_descriptor_pool *pool,
			    struct radv_descriptor_set *set,
			    bool free_bo)
{
	if (free_bo && set->size) {
		assert(pool->full_list >= 0);
		int next = pool->free_nodes[pool->full_list].next;
		pool->free_nodes[pool->full_list].next = pool->free_list;
		pool->free_nodes[pool->full_list].offset = (uint8_t*)set->mapped_ptr - pool->mapped_ptr;
		pool->free_nodes[pool->full_list].size = align_u32(set->size, 32);
		pool->free_list = pool->full_list;
		pool->full_list = next;
	}
	if (set->dynamic_descriptors)
		vk_free2(&device->alloc, NULL, set->dynamic_descriptors);
	if (!list_empty(&set->descriptor_pool))
		list_del(&set->descriptor_pool);
	vk_free2(&device->alloc, NULL, set);
}

VkResult
radv_temp_descriptor_set_create(struct radv_device *device,
				struct radv_cmd_buffer *cmd_buffer,
				VkDescriptorSetLayout _layout,
				VkDescriptorSet *_set)
{
	RADV_FROM_HANDLE(radv_descriptor_set_layout, layout, _layout);
	struct radv_descriptor_set *set;
	VkResult ret;

	ret = radv_descriptor_set_create(device, NULL, cmd_buffer, layout, &set);
	if (ret != VK_SUCCESS)
		return ret;

	*_set = radv_descriptor_set_to_handle(set);
	return ret;
}

void
radv_temp_descriptor_set_destroy(struct radv_device *device,
				 VkDescriptorSet _set)
{
	RADV_FROM_HANDLE(radv_descriptor_set, set, _set);

	radv_descriptor_set_destroy(device, NULL, set, false);
}

VkResult radv_CreateDescriptorPool(
	VkDevice                                    _device,
	const VkDescriptorPoolCreateInfo*           pCreateInfo,
	const VkAllocationCallbacks*                pAllocator,
	VkDescriptorPool*                           pDescriptorPool)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	struct radv_descriptor_pool *pool;
	unsigned max_sets = pCreateInfo->maxSets * 2;
	int size = sizeof(struct radv_descriptor_pool) +
	           max_sets * sizeof(struct radv_descriptor_pool_free_node);
	uint64_t bo_size = 0;
	pool = vk_alloc2(&device->alloc, pAllocator, size, 8,
			   VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
	if (!pool)
		return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

	memset(pool, 0, sizeof(*pool));

	pool->free_list = -1;
	pool->full_list = 0;
	pool->free_nodes[max_sets - 1].next = -1;
	pool->max_sets = max_sets;
	pool->allocated_sets = 0;

	for (int i = 0; i + 1 < max_sets; ++i)
		pool->free_nodes[i].next = i + 1;

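	/* Accumulate a worst-case BO size from the declared pool sizes,
	 * mirroring the per-type descriptor sizes used when creating set
	 * layouts.
	 */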
	for (unsigned i = 0; i < pCreateInfo->poolSizeCount; ++i) {
		switch (pCreateInfo->pPoolSizes[i].type) {
		case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
		case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
			break;
		case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
		case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
		case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
		case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
		case VK_DESCRIPTOR_TYPE_SAMPLER:
			/* 32 as we may need to align for images */
			bo_size += 32 * pCreateInfo->pPoolSizes[i].descriptorCount;
			break;
		case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
		case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
		case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
			bo_size += 64 * pCreateInfo->pPoolSizes[i].descriptorCount;
			break;
		case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
			bo_size += 96 * pCreateInfo->pPoolSizes[i].descriptorCount;
			break;
		default:
			unreachable("unknown descriptor type\n");
			break;
		}
	}

	if (bo_size) {
		pool->bo = device->ws->buffer_create(device->ws, bo_size,
							32, RADEON_DOMAIN_VRAM, 0);
		pool->mapped_ptr = (uint8_t*)device->ws->buffer_map(pool->bo);
	}
	pool->size = bo_size;

	list_inithead(&pool->descriptor_sets);
	*pDescriptorPool = radv_descriptor_pool_to_handle(pool);
	return VK_SUCCESS;
}

void radv_DestroyDescriptorPool(
	VkDevice                                    _device,
	VkDescriptorPool                            _pool,
	const VkAllocationCallbacks*                pAllocator)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_descriptor_pool, pool, _pool);

	if (!pool)
		return;

	list_for_each_entry_safe(struct radv_descriptor_set, set,
				 &pool->descriptor_sets, descriptor_pool) {
		radv_descriptor_set_destroy(device, pool, set, false);
	}

	if (pool->bo)
		device->ws->buffer_destroy(pool->bo);
	vk_free2(&device->alloc, pAllocator, pool);
}

VkResult radv_ResetDescriptorPool(
	VkDevice                                    _device,
	VkDescriptorPool                            descriptorPool,
	VkDescriptorPoolResetFlags                  flags)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_descriptor_pool, pool, descriptorPool);

	list_for_each_entry_safe(struct radv_descriptor_set, set,
				 &pool->descriptor_sets, descriptor_pool) {
		radv_descriptor_set_destroy(device, pool, set, false);
	}

	pool->allocated_sets = 0;
	pool->current_offset = 0;
	pool->free_list = -1;
	pool->full_list = 0;
	pool->free_nodes[pool->max_sets - 1].next = -1;

	for (int i = 0; i + 1 < pool->max_sets; ++i)
		pool->free_nodes[i].next = i + 1;

	return VK_SUCCESS;
}

VkResult radv_AllocateDescriptorSets(
	VkDevice                                    _device,
	const VkDescriptorSetAllocateInfo*          pAllocateInfo,
	VkDescriptorSet*                            pDescriptorSets)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_descriptor_pool, pool, pAllocateInfo->descriptorPool);

	VkResult result = VK_SUCCESS;
	uint32_t i;
	struct radv_descriptor_set *set;

	/* allocate the descriptor sets, one per requested layout */
	for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
		RADV_FROM_HANDLE(radv_descriptor_set_layout, layout,
				 pAllocateInfo->pSetLayouts[i]);

		result = radv_descriptor_set_create(device, pool, NULL, layout, &set);
		if (result != VK_SUCCESS)
			break;

		pDescriptorSets[i] = radv_descriptor_set_to_handle(set);
	}

	if (result != VK_SUCCESS)
		radv_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
					i, pDescriptorSets);
	return result;
}

VkResult radv_FreeDescriptorSets(
	VkDevice                                    _device,
	VkDescriptorPool                            descriptorPool,
	uint32_t                                    count,
	const VkDescriptorSet*                      pDescriptorSets)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_descriptor_pool, pool, descriptorPool);

	for (uint32_t i = 0; i < count; i++) {
		RADV_FROM_HANDLE(radv_descriptor_set, set, pDescriptorSets[i]);

		if (set)
			radv_descriptor_set_destroy(device, pool, set, true);
	}
	return VK_SUCCESS;
}

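/* The write_* helpers below pack hardware resource descriptors directly
 * into the set's mapped memory and record the backing BO in the set's
 * buffer list so it can be tracked when the set is used.
 */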
static void write_texel_buffer_descriptor(struct radv_device *device,
					  unsigned *dst,
					  struct radeon_winsys_bo **buffer_list,
					  const VkBufferView _buffer_view)
{
	RADV_FROM_HANDLE(radv_buffer_view, buffer_view, _buffer_view);

	memcpy(dst, buffer_view->state, 4 * 4);
	*buffer_list = buffer_view->bo;
}

static void write_buffer_descriptor(struct radv_device *device,
                                    unsigned *dst,
                                    struct radeon_winsys_bo **buffer_list,
                                    const VkDescriptorBufferInfo *buffer_info)
{
	RADV_FROM_HANDLE(radv_buffer, buffer, buffer_info->buffer);
	uint64_t va = device->ws->buffer_get_va(buffer->bo);
	uint32_t range = buffer_info->range;

	if (buffer_info->range == VK_WHOLE_SIZE)
		range = buffer->size - buffer_info->offset;

	va += buffer_info->offset + buffer->offset;
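	/* Pack a 4-dword buffer resource descriptor: the GPU address is
	 * split across dwords 0-1 (low 32 bits, then the high bits via
	 * S_008F04_BASE_ADDRESS_HI), the range goes in dword 2 and the
	 * format/swizzle bits in dword 3.
	 */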
	dst[0] = va;
	dst[1] = S_008F04_BASE_ADDRESS_HI(va >> 32);
	dst[2] = range;
	dst[3] = S_008F0C_DST_SEL_X(V_008F0C_SQ_SEL_X) |
		S_008F0C_DST_SEL_Y(V_008F0C_SQ_SEL_Y) |
		S_008F0C_DST_SEL_Z(V_008F0C_SQ_SEL_Z) |
		S_008F0C_DST_SEL_W(V_008F0C_SQ_SEL_W) |
		S_008F0C_NUM_FORMAT(V_008F0C_BUF_NUM_FORMAT_FLOAT) |
		S_008F0C_DATA_FORMAT(V_008F0C_BUF_DATA_FORMAT_32);

	*buffer_list = buffer->bo;
}

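/* Dynamic buffer descriptors are not written into GPU memory here; only
 * the base address and size are recorded, to be combined with the dynamic
 * offset when the set is bound.
 */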
static void write_dynamic_buffer_descriptor(struct radv_device *device,
                                            struct radv_descriptor_range *range,
                                            struct radeon_winsys_bo **buffer_list,
                                            const VkDescriptorBufferInfo *buffer_info)
{
	RADV_FROM_HANDLE(radv_buffer, buffer, buffer_info->buffer);
	uint64_t va = device->ws->buffer_get_va(buffer->bo);
	unsigned size = buffer_info->range;

	if (buffer_info->range == VK_WHOLE_SIZE)
		size = buffer->size - buffer_info->offset;

	va += buffer_info->offset + buffer->offset;
	range->va = va;
	range->size = size;

	*buffer_list = buffer->bo;
}

static void
write_image_descriptor(struct radv_device *device,
		       unsigned *dst,
		       struct radeon_winsys_bo **buffer_list,
		       const VkDescriptorImageInfo *image_info)
{
	RADV_FROM_HANDLE(radv_image_view, iview, image_info->imageView);
	memcpy(dst, iview->descriptor, 8 * 4);
	memcpy(dst + 8, iview->fmask_descriptor, 8 * 4);
	*buffer_list = iview->bo;
}

static void
write_combined_image_sampler_descriptor(struct radv_device *device,
					unsigned *dst,
					struct radeon_winsys_bo **buffer_list,
					const VkDescriptorImageInfo *image_info,
					bool has_sampler)
{
	RADV_FROM_HANDLE(radv_sampler, sampler, image_info->sampler);

	write_image_descriptor(device, dst, buffer_list, image_info);
	/* copy over sampler state */
	if (has_sampler)
		memcpy(dst + 16, sampler->state, 16);
}

static void
write_sampler_descriptor(struct radv_device *device,
			 unsigned *dst,
			 const VkDescriptorImageInfo *image_info)
{
	RADV_FROM_HANDLE(radv_sampler, sampler, image_info->sampler);

	memcpy(dst, sampler->state, 16);
}

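/* Direct descriptor updates: for each write, compute the CPU pointer into
 * the set's mapped memory from the binding's offset plus dstArrayElement,
 * then emit one hardware descriptor per array element.
 */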
void radv_UpdateDescriptorSets(
	VkDevice                                    _device,
	uint32_t                                    descriptorWriteCount,
	const VkWriteDescriptorSet*                 pDescriptorWrites,
	uint32_t                                    descriptorCopyCount,
	const VkCopyDescriptorSet*                  pDescriptorCopies)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	uint32_t i, j;
	for (i = 0; i < descriptorWriteCount; i++) {
		const VkWriteDescriptorSet *writeset = &pDescriptorWrites[i];
		RADV_FROM_HANDLE(radv_descriptor_set, set, writeset->dstSet);
		const struct radv_descriptor_set_binding_layout *binding_layout =
			set->layout->binding + writeset->dstBinding;
		uint32_t *ptr = set->mapped_ptr;
		struct radeon_winsys_bo **buffer_list = set->descriptors;

		ptr += binding_layout->offset / 4;
		ptr += binding_layout->size * writeset->dstArrayElement / 4;
		buffer_list += binding_layout->buffer_offset;
		buffer_list += binding_layout->buffer_count * writeset->dstArrayElement;
		for (j = 0; j < writeset->descriptorCount; ++j) {
			switch (writeset->descriptorType) {
			case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
			case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
				unsigned idx = writeset->dstArrayElement + j;
				idx += binding_layout->dynamic_offset_offset;
				write_dynamic_buffer_descriptor(device, set->dynamic_descriptors + idx,
								buffer_list, writeset->pBufferInfo + j);
				break;
			}
			case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
			case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
				write_buffer_descriptor(device, ptr, buffer_list,
							writeset->pBufferInfo + j);
				break;
			case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
			case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
				write_texel_buffer_descriptor(device, ptr, buffer_list,
							      writeset->pTexelBufferView[j]);
				break;
			case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
			case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
			case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
				write_image_descriptor(device, ptr, buffer_list,
						       writeset->pImageInfo + j);
				break;
			case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
				write_combined_image_sampler_descriptor(device, ptr, buffer_list,
									writeset->pImageInfo + j,
									!binding_layout->immutable_samplers);
				break;
			case VK_DESCRIPTOR_TYPE_SAMPLER:
				assert(!binding_layout->immutable_samplers);
				write_sampler_descriptor(device, ptr,
							 writeset->pImageInfo + j);
				break;
			default:
				unreachable("unimplemented descriptor type");
				break;
			}
			ptr += binding_layout->size / 4;
			buffer_list += binding_layout->buffer_count;
		}
	}
	if (descriptorCopyCount)
		radv_finishme("copy descriptors");
}