1/*------------------------------------------------------------------------
2 * Vulkan Conformance Tests
3 * ------------------------
4 *
5 * Copyright (c) 2016 The Khronos Group Inc.
6 *
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 *
11 *      http://www.apache.org/licenses/LICENSE-2.0
12 *
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
18 *
19 *//*!
20 * \file
21 * \brief Synchronization operation abstraction
22 *//*--------------------------------------------------------------------*/
23
24#include "vktSynchronizationOperation.hpp"
25#include "vkDefs.hpp"
26#include "vktTestCase.hpp"
27#include "vktTestCaseUtil.hpp"
28#include "vkRef.hpp"
29#include "vkRefUtil.hpp"
30#include "vkMemUtil.hpp"
31#include "vkQueryUtil.hpp"
32#include "vkTypeUtil.hpp"
33#include "vkImageUtil.hpp"
34#include "vkBuilderUtil.hpp"
35#include "deUniquePtr.hpp"
36#include "tcuTestLog.hpp"
37#include "tcuTextureUtil.hpp"
38#include <vector>
39#include <sstream>
40
41namespace vkt
42{
43namespace synchronization
44{
45namespace
46{
47using namespace vk;
48
49enum Constants
50{
51	MAX_IMAGE_DIMENSION_2D	= 0x1000u,
52	MAX_UBO_RANGE			= 0x4000u,
53	MAX_UPDATE_BUFFER_SIZE	= 0x10000u,
54};
55
56enum BufferType
57{
58	BUFFER_TYPE_UNIFORM,
59	BUFFER_TYPE_STORAGE,
60};
61
62enum AccessMode
63{
64	ACCESS_MODE_READ,
65	ACCESS_MODE_WRITE,
66};
67
68enum PipelineType
69{
70	PIPELINE_TYPE_GRAPHICS,
71	PIPELINE_TYPE_COMPUTE,
72};
73
74static const char* const s_perVertexBlock =	"gl_PerVertex {\n"
75											"    vec4 gl_Position;\n"
76											"}";
77
78//! A pipeline that can be embedded inside an operation.
79class Pipeline
80{
81public:
82	virtual			~Pipeline		(void) {}
83	virtual void	recordCommands	(OperationContext& context, const VkCommandBuffer cmdBuffer, const VkDescriptorSet descriptorSet) = 0;
84};
85
86//! Vertex data that covers the whole viewport with two triangles.
87class VertexGrid
88{
89public:
90	VertexGrid (OperationContext& context)
91		: m_vertexFormat (VK_FORMAT_R32G32B32A32_SFLOAT)
92		, m_vertexStride (tcu::getPixelSize(mapVkFormat(m_vertexFormat)))
93	{
94		const DeviceInterface&	vk			= context.getDeviceInterface();
95		const VkDevice			device		= context.getDevice();
96		Allocator&				allocator	= context.getAllocator();
97
98		// Vertex positions
99		{
100			m_vertexData.push_back(tcu::Vec4( 1.0f,  1.0f, 0.0f, 1.0f));
101			m_vertexData.push_back(tcu::Vec4(-1.0f,  1.0f, 0.0f, 1.0f));
102			m_vertexData.push_back(tcu::Vec4(-1.0f, -1.0f, 0.0f, 1.0f));
103
104			m_vertexData.push_back(tcu::Vec4(-1.0f, -1.0f, 0.0f, 1.0f));
105			m_vertexData.push_back(tcu::Vec4( 1.0f, -1.0f, 0.0f, 1.0f));
106			m_vertexData.push_back(tcu::Vec4( 1.0f,  1.0f, 0.0f, 1.0f));
107		}
108
109		{
110			const VkDeviceSize vertexDataSizeBytes = m_vertexData.size() * sizeof(m_vertexData[0]);
111
112			m_vertexBuffer = de::MovePtr<Buffer>(new Buffer(vk, device, allocator, makeBufferCreateInfo(vertexDataSizeBytes, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT), MemoryRequirement::HostVisible));
113			DE_ASSERT(sizeof(m_vertexData[0]) == m_vertexStride);
114
115			{
116				const Allocation& alloc = m_vertexBuffer->getAllocation();
117
118				deMemcpy(alloc.getHostPtr(), &m_vertexData[0], static_cast<std::size_t>(vertexDataSizeBytes));
119				flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), vertexDataSizeBytes);
120			}
121		}
122
123		// Indices
124		{
125			const VkDeviceSize	indexBufferSizeBytes	= sizeof(deUint32) * m_vertexData.size();
126			const deUint32		numIndices				= static_cast<deUint32>(m_vertexData.size());
127
128			m_indexBuffer = de::MovePtr<Buffer>(new Buffer(vk, device, allocator, makeBufferCreateInfo(indexBufferSizeBytes, VK_BUFFER_USAGE_INDEX_BUFFER_BIT), MemoryRequirement::HostVisible));
129
130			{
131				const Allocation&	alloc	= m_indexBuffer->getAllocation();
132				deUint32* const		pData	= static_cast<deUint32*>(alloc.getHostPtr());
133
134				for (deUint32 i = 0; i < numIndices; ++i)
135					pData[i] = i;
136
137				flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), indexBufferSizeBytes);
138			}
139		}
140	}
141
142	VkFormat	getVertexFormat		(void) const { return m_vertexFormat; }
143	deUint32	getVertexStride		(void) const { return m_vertexStride; }
144	VkIndexType getIndexType		(void) const { return VK_INDEX_TYPE_UINT32; }
145	deUint32	getNumVertices		(void) const { return static_cast<deUint32>(m_vertexData.size()); }
146	deUint32	getNumIndices		(void) const { return getNumVertices(); }
147	VkBuffer	getVertexBuffer		(void) const { return **m_vertexBuffer; }
148	VkBuffer	getIndexBuffer		(void) const { return **m_indexBuffer; }
149
150private:
151	const VkFormat				m_vertexFormat;
152	const deUint32				m_vertexStride;
153	std::vector<tcu::Vec4>		m_vertexData;
154	de::MovePtr<Buffer>			m_vertexBuffer;
155	de::MovePtr<Buffer>			m_indexBuffer;
156};
157
158//! Add flags for all shader stages required to support a particular stage (e.g. fragment requires vertex as well).
159VkShaderStageFlags getRequiredStages (const VkShaderStageFlagBits stage)
160{
161	VkShaderStageFlags flags = 0;
162
163	DE_ASSERT(stage == VK_SHADER_STAGE_COMPUTE_BIT || (stage & VK_SHADER_STAGE_COMPUTE_BIT) == 0);
164
165	if (stage & VK_SHADER_STAGE_ALL_GRAPHICS)
166		flags |= VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT;
167
168	if (stage & (VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT | VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT))
169		flags |= VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT | VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
170
171	if (stage & VK_SHADER_STAGE_GEOMETRY_BIT)
172		flags |= VK_SHADER_STAGE_GEOMETRY_BIT;
173
174	if (stage & VK_SHADER_STAGE_COMPUTE_BIT)
175		flags |= VK_SHADER_STAGE_COMPUTE_BIT;
176
177	return flags;
178}
179
180//! Check that SSBO read/write is available and that all shader stages are supported.
181void requireFeaturesForSSBOAccess (OperationContext& context, const VkShaderStageFlags usedStages)
182{
183	const InstanceInterface&	vki			= context.getInstanceInterface();
184	const VkPhysicalDevice		physDevice	= context.getPhysicalDevice();
185	FeatureFlags				flags		= (FeatureFlags)0;
186
187	if (usedStages & VK_SHADER_STAGE_FRAGMENT_BIT)
188		flags |= FEATURE_FRAGMENT_STORES_AND_ATOMICS;
189
190	if (usedStages & (VK_SHADER_STAGE_ALL_GRAPHICS & (~VK_SHADER_STAGE_FRAGMENT_BIT)))
191		flags |= FEATURE_VERTEX_PIPELINE_STORES_AND_ATOMICS;
192
193	if (usedStages & VK_SHADER_STAGE_GEOMETRY_BIT)
194		flags |= FEATURE_GEOMETRY_SHADER;
195
196	if (usedStages & (VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT | VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT))
197		flags |= FEATURE_TESSELLATION_SHADER;
198
199	requireFeatures(vki, physDevice, flags);
200}
201
202Data getHostBufferData (const OperationContext& context, const Buffer& hostBuffer, const VkDeviceSize size)
203{
204	const DeviceInterface&	vk		= context.getDeviceInterface();
205	const VkDevice			device	= context.getDevice();
206	const Allocation&		alloc	= hostBuffer.getAllocation();
207	const Data				data	=
208	{
209		static_cast<std::size_t>(size),					// std::size_t		size;
210		static_cast<deUint8*>(alloc.getHostPtr()),		// const deUint8*	data;
211	};
212
213	invalidateMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), size);
214
215	return data;
216}
217
218void assertValidShaderStage (const VkShaderStageFlagBits stage)
219{
220	switch (stage)
221	{
222		case VK_SHADER_STAGE_VERTEX_BIT:
223		case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:
224		case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:
225		case VK_SHADER_STAGE_GEOMETRY_BIT:
226		case VK_SHADER_STAGE_FRAGMENT_BIT:
227		case VK_SHADER_STAGE_COMPUTE_BIT:
228			// OK
229			break;
230
231		default:
232			DE_FATAL("Invalid shader stage");
233			break;
234	}
235}
236
237VkPipelineStageFlags pipelineStageFlagsFromShaderStageFlagBits (const VkShaderStageFlagBits shaderStage)
238{
239	switch (shaderStage)
240	{
241		case VK_SHADER_STAGE_VERTEX_BIT:					return VK_PIPELINE_STAGE_VERTEX_SHADER_BIT;
242		case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:		return VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT;
243		case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:	return VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT;
244		case VK_SHADER_STAGE_GEOMETRY_BIT:					return VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT;
245		case VK_SHADER_STAGE_FRAGMENT_BIT:					return VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
246		case VK_SHADER_STAGE_COMPUTE_BIT:					return VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
247
248		// Other usages are probably an error, so flag that.
249		default:
250			DE_FATAL("Invalid shader stage");
251			return (VkPipelineStageFlags)0;
252	}
253}
254
255//! Fill destination buffer with a repeating pattern.
256void fillPattern (void* const pData, const VkDeviceSize size)
257{
258	static const deUint8	pattern[]	= { 2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31 };
259	deUint8* const			pBytes		= static_cast<deUint8*>(pData);
260
261	for (deUint32 i = 0; i < size; ++i)
262		pBytes[i] = pattern[i % DE_LENGTH_OF_ARRAY(pattern)];
263}
264
265//! Get size in bytes of a pixel buffer with given extent.
266VkDeviceSize getPixelBufferSize (const VkFormat format, const VkExtent3D& extent)
267{
268	const int pixelSize = tcu::getPixelSize(mapVkFormat(format));
269	return (pixelSize * extent.width * extent.height * extent.depth);
270}
271
272//! Determine the size of a 2D image that can hold sizeBytes data.
273VkExtent3D get2DImageExtentWithSize (const VkDeviceSize sizeBytes, const deUint32 pixelSize)
274{
275	const deUint32 size = static_cast<deUint32>(sizeBytes / pixelSize);
276
277	DE_ASSERT(size <= MAX_IMAGE_DIMENSION_2D * MAX_IMAGE_DIMENSION_2D);
278
279	return makeExtent3D(
280		std::min(size, static_cast<deUint32>(MAX_IMAGE_DIMENSION_2D)),
281		(size / MAX_IMAGE_DIMENSION_2D) + (size % MAX_IMAGE_DIMENSION_2D != 0 ? 1u : 0u),
282		1u);
283}
284
285VkClearValue makeClearValue (const VkFormat format)
286{
287	if (isDepthStencilFormat(format))
288		return makeClearValueDepthStencil(0.4f, 21u);
289	else
290	{
291		if (isIntFormat(format) || isUintFormat(format))
292			return makeClearValueColorU32(8u, 16u, 24u, 32u);
293		else
294			return makeClearValueColorF32(0.25f, 0.49f, 0.75f, 1.0f);
295	}
296}
297
298void clearPixelBuffer (tcu::PixelBufferAccess& pixels, const VkClearValue& clearValue)
299{
300	const tcu::TextureFormat		format			= pixels.getFormat();
301	const tcu::TextureChannelClass	channelClass	= tcu::getTextureChannelClass(format.type);
302
303	if (format.order == tcu::TextureFormat::D)
304	{
305		for (int z = 0; z < pixels.getDepth(); z++)
306		for (int y = 0; y < pixels.getHeight(); y++)
307		for (int x = 0; x < pixels.getWidth(); x++)
308			pixels.setPixDepth(clearValue.depthStencil.depth, x, y, z);
309	}
310	else if (format.order == tcu::TextureFormat::S)
311	{
312		for (int z = 0; z < pixels.getDepth(); z++)
313		for (int y = 0; y < pixels.getHeight(); y++)
314		for (int x = 0; x < pixels.getWidth(); x++)
315			pixels.setPixStencil(clearValue.depthStencil.stencil, x, y, z);
316	}
317	else if (format.order == tcu::TextureFormat::DS)
318	{
319		for (int z = 0; z < pixels.getDepth(); z++)
320		for (int y = 0; y < pixels.getHeight(); y++)
321		for (int x = 0; x < pixels.getWidth(); x++)
322		{
323			pixels.setPixDepth(clearValue.depthStencil.depth, x, y, z);
324			pixels.setPixStencil(clearValue.depthStencil.stencil, x, y, z);
325		}
326	}
327	else if (channelClass == tcu::TEXTURECHANNELCLASS_SIGNED_INTEGER || channelClass == tcu::TEXTURECHANNELCLASS_UNSIGNED_INTEGER)
328	{
329		const tcu::UVec4 color (clearValue.color.uint32);
330
331		for (int z = 0; z < pixels.getDepth(); z++)
332		for (int y = 0; y < pixels.getHeight(); y++)
333		for (int x = 0; x < pixels.getWidth(); x++)
334			pixels.setPixel(color, x, y, z);
335	}
336	else
337	{
338		const tcu::Vec4 color (clearValue.color.float32);
339
340		for (int z = 0; z < pixels.getDepth(); z++)
341		for (int y = 0; y < pixels.getHeight(); y++)
342		for (int x = 0; x < pixels.getWidth(); x++)
343			pixels.setPixel(color, x, y, z);
344	}
345}
346
347//! Storage image format that requires StorageImageExtendedFormats SPIR-V capability (listed only Vulkan-defined formats).
348bool isStorageImageExtendedFormat (const VkFormat format)
349{
350	switch (format)
351	{
352		case VK_FORMAT_R32G32_SFLOAT:
353		case VK_FORMAT_R32G32_SINT:
354		case VK_FORMAT_R32G32_UINT:
355		case VK_FORMAT_R16G16B16A16_UNORM:
356		case VK_FORMAT_R16G16B16A16_SNORM:
357		case VK_FORMAT_R16G16_SFLOAT:
358		case VK_FORMAT_R16G16_UNORM:
359		case VK_FORMAT_R16G16_SNORM:
360		case VK_FORMAT_R16G16_SINT:
361		case VK_FORMAT_R16G16_UINT:
362		case VK_FORMAT_R16_SFLOAT:
363		case VK_FORMAT_R16_UNORM:
364		case VK_FORMAT_R16_SNORM:
365		case VK_FORMAT_R16_SINT:
366		case VK_FORMAT_R16_UINT:
367		case VK_FORMAT_R8G8_UNORM:
368		case VK_FORMAT_R8G8_SNORM:
369		case VK_FORMAT_R8G8_SINT:
370		case VK_FORMAT_R8G8_UINT:
371		case VK_FORMAT_R8_UNORM:
372		case VK_FORMAT_R8_SNORM:
373		case VK_FORMAT_R8_SINT:
374		case VK_FORMAT_R8_UINT:
375			return true;
376
377		default:
378			return false;
379	}
380}
381
382VkImageViewType getImageViewType (const VkImageType imageType)
383{
384	switch (imageType)
385	{
386		case VK_IMAGE_TYPE_1D:		return VK_IMAGE_VIEW_TYPE_1D;
387		case VK_IMAGE_TYPE_2D:		return VK_IMAGE_VIEW_TYPE_2D;
388		case VK_IMAGE_TYPE_3D:		return VK_IMAGE_VIEW_TYPE_3D;
389
390		default:
391			DE_FATAL("Unknown image type");
392			return VK_IMAGE_VIEW_TYPE_LAST;
393	}
394}
395
396std::string getShaderImageType (const VkFormat format, const VkImageType imageType)
397{
398	const tcu::TextureFormat	texFormat	= mapVkFormat(format);
399	const std::string			formatPart	= tcu::getTextureChannelClass(texFormat.type) == tcu::TEXTURECHANNELCLASS_UNSIGNED_INTEGER ? "u" :
400											  tcu::getTextureChannelClass(texFormat.type) == tcu::TEXTURECHANNELCLASS_SIGNED_INTEGER   ? "i" : "";
401	switch (imageType)
402	{
403		case VK_IMAGE_TYPE_1D:	return formatPart + "image1D";
404		case VK_IMAGE_TYPE_2D:	return formatPart + "image2D";
405		case VK_IMAGE_TYPE_3D:	return formatPart + "image3D";
406
407		default:
408			DE_FATAL("Unknown image type");
409			return DE_NULL;
410	}
411}
412
413std::string getShaderImageFormatQualifier (const VkFormat format)
414{
415	const tcu::TextureFormat	texFormat	= mapVkFormat(format);
416	const char*					orderPart	= DE_NULL;
417	const char*					typePart	= DE_NULL;
418
419	switch (texFormat.order)
420	{
421		case tcu::TextureFormat::R:		orderPart = "r";	break;
422		case tcu::TextureFormat::RG:	orderPart = "rg";	break;
423		case tcu::TextureFormat::RGB:	orderPart = "rgb";	break;
424		case tcu::TextureFormat::RGBA:	orderPart = "rgba";	break;
425
426		default:
427			DE_FATAL("Unksupported texture channel order");
428			break;
429	}
430
431	switch (texFormat.type)
432	{
433		case tcu::TextureFormat::FLOAT:				typePart = "32f";		break;
434		case tcu::TextureFormat::HALF_FLOAT:		typePart = "16f";		break;
435
436		case tcu::TextureFormat::UNSIGNED_INT32:	typePart = "32ui";		break;
437		case tcu::TextureFormat::UNSIGNED_INT16:	typePart = "16ui";		break;
438		case tcu::TextureFormat::UNSIGNED_INT8:		typePart = "8ui";		break;
439
440		case tcu::TextureFormat::SIGNED_INT32:		typePart = "32i";		break;
441		case tcu::TextureFormat::SIGNED_INT16:		typePart = "16i";		break;
442		case tcu::TextureFormat::SIGNED_INT8:		typePart = "8i";		break;
443
444		case tcu::TextureFormat::UNORM_INT16:		typePart = "16";		break;
445		case tcu::TextureFormat::UNORM_INT8:		typePart = "8";			break;
446
447		case tcu::TextureFormat::SNORM_INT16:		typePart = "16_snorm";	break;
448		case tcu::TextureFormat::SNORM_INT8:		typePart = "8_snorm";	break;
449
450		default:
451			DE_FATAL("Unksupported texture channel type");
452			break;
453	}
454
455	return std::string(orderPart) + typePart;
456}
457
458namespace FillUpdateBuffer
459{
460
461enum BufferOp
462{
463	BUFFER_OP_FILL,
464	BUFFER_OP_UPDATE,
465};
466
467class Implementation : public Operation
468{
469public:
470	Implementation (OperationContext& context, Resource& resource, const BufferOp bufferOp)
471		: m_context		(context)
472		, m_resource	(resource)
473		, m_fillValue	(0x13)
474		, m_bufferOp	(bufferOp)
475	{
476		DE_ASSERT((m_resource.getBuffer().size % sizeof(deUint32)) == 0);
477		DE_ASSERT(m_bufferOp == BUFFER_OP_FILL || m_resource.getBuffer().size <= MAX_UPDATE_BUFFER_SIZE);
478
479		m_data.resize(static_cast<size_t>(m_resource.getBuffer().size));
480
481		if (m_bufferOp == BUFFER_OP_FILL)
482		{
483			const std::size_t	size	= m_data.size() / sizeof(m_fillValue);
484			deUint32* const		pData	= reinterpret_cast<deUint32*>(&m_data[0]);
485
486			for (deUint32 i = 0; i < size; ++i)
487				pData[i] = m_fillValue;
488		}
489		else if (m_bufferOp == BUFFER_OP_UPDATE)
490		{
491			fillPattern(&m_data[0], m_data.size());
492		}
493		else
494		{
495			// \todo Really??
496			// Do nothing
497		}
498	}
499
500	void recordCommands (const VkCommandBuffer cmdBuffer)
501	{
502		const DeviceInterface&	vk	= m_context.getDeviceInterface();
503
504		if (m_bufferOp == BUFFER_OP_FILL)
505			vk.cmdFillBuffer(cmdBuffer, m_resource.getBuffer().handle, m_resource.getBuffer().offset, m_resource.getBuffer().size, m_fillValue);
506		else if (m_bufferOp == BUFFER_OP_UPDATE)
507			vk.cmdUpdateBuffer(cmdBuffer, m_resource.getBuffer().handle, m_resource.getBuffer().offset, m_resource.getBuffer().size, reinterpret_cast<deUint32*>(&m_data[0]));
508		else
509		{
510			// \todo Really??
511			// Do nothing
512		}
513	}
514
515	SyncInfo getSyncInfo (void) const
516	{
517		const SyncInfo syncInfo =
518		{
519			VK_PIPELINE_STAGE_TRANSFER_BIT,		// VkPipelineStageFlags		stageMask;
520			VK_ACCESS_TRANSFER_WRITE_BIT,		// VkAccessFlags			accessMask;
521			VK_IMAGE_LAYOUT_UNDEFINED,			// VkImageLayout			imageLayout;
522		};
523
524		return syncInfo;
525	}
526
527	Data getData (void) const
528	{
529		const Data data =
530		{
531			m_data.size(),		// std::size_t		size;
532			&m_data[0],			// const deUint8*	data;
533		};
534		return data;
535	}
536
537private:
538	OperationContext&		m_context;
539	Resource&				m_resource;
540	std::vector<deUint8>	m_data;
541	const deUint32			m_fillValue;
542	const BufferOp			m_bufferOp;
543};
544
545class Support : public OperationSupport
546{
547public:
548	Support (const ResourceDescription& resourceDesc, const BufferOp bufferOp)
549		: m_resourceDesc	(resourceDesc)
550		, m_bufferOp		(bufferOp)
551	{
552		DE_ASSERT(m_bufferOp == BUFFER_OP_FILL || m_bufferOp == BUFFER_OP_UPDATE);
553		DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_BUFFER);
554	}
555
556	deUint32 getResourceUsageFlags (void) const
557	{
558		return VK_BUFFER_USAGE_TRANSFER_DST_BIT;
559	}
560
561	VkQueueFlags getQueueFlags (const OperationContext& context) const
562	{
563		if (m_bufferOp == BUFFER_OP_FILL &&
564			!isDeviceExtensionSupported(context.getUsedApiVersion(), context.getDeviceExtensions(), "VK_KHR_maintenance1"))
565		{
566			return VK_QUEUE_COMPUTE_BIT | VK_QUEUE_GRAPHICS_BIT;
567		}
568
569		return VK_QUEUE_TRANSFER_BIT;
570	}
571
572	de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
573	{
574		return de::MovePtr<Operation>(new Implementation(context, resource, m_bufferOp));
575	}
576
577private:
578	const ResourceDescription	m_resourceDesc;
579	const BufferOp				m_bufferOp;
580};
581
582} // FillUpdateBuffer ns
583
584namespace CopyBuffer
585{
586
587class Implementation : public Operation
588{
589public:
590	Implementation (OperationContext& context, Resource& resource, const AccessMode mode)
591		: m_context		(context)
592		, m_resource	(resource)
593		, m_mode		(mode)
594	{
595		const DeviceInterface&		vk				= m_context.getDeviceInterface();
596		const VkDevice				device			= m_context.getDevice();
597		Allocator&					allocator		= m_context.getAllocator();
598		const VkBufferUsageFlags	hostBufferUsage	= (m_mode == ACCESS_MODE_READ ? VK_BUFFER_USAGE_TRANSFER_DST_BIT : VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
599
600		m_hostBuffer = de::MovePtr<Buffer>(new Buffer(vk, device, allocator, makeBufferCreateInfo(m_resource.getBuffer().size, hostBufferUsage), MemoryRequirement::HostVisible));
601
602		const Allocation& alloc = m_hostBuffer->getAllocation();
603
604		if (m_mode == ACCESS_MODE_READ)
605			deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(m_resource.getBuffer().size));
606		else
607			fillPattern(alloc.getHostPtr(), m_resource.getBuffer().size);
608
609		flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), m_resource.getBuffer().size);
610	}
611
612	void recordCommands (const VkCommandBuffer cmdBuffer)
613	{
614		const DeviceInterface&	vk			= m_context.getDeviceInterface();
615		const VkBufferCopy		copyRegion	= makeBufferCopy(0u, 0u, m_resource.getBuffer().size);
616
617		if (m_mode == ACCESS_MODE_READ)
618		{
619			vk.cmdCopyBuffer(cmdBuffer, m_resource.getBuffer().handle, **m_hostBuffer, 1u, &copyRegion);
620
621			// Insert a barrier so copied data is available to the host
622			const VkBufferMemoryBarrier	barrier = makeBufferMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT, **m_hostBuffer, 0u, m_resource.getBuffer().size);
623			vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0u, DE_NULL, 1u, &barrier, 0u, DE_NULL);
624		}
625		else
626			vk.cmdCopyBuffer(cmdBuffer, **m_hostBuffer, m_resource.getBuffer().handle, 1u, &copyRegion);
627	}
628
629	SyncInfo getSyncInfo (void) const
630	{
631		const VkAccessFlags access		= (m_mode == ACCESS_MODE_READ ? VK_ACCESS_TRANSFER_READ_BIT : VK_ACCESS_TRANSFER_WRITE_BIT);
632		const SyncInfo		syncInfo	=
633		{
634			VK_PIPELINE_STAGE_TRANSFER_BIT,		// VkPipelineStageFlags		stageMask;
635			access,								// VkAccessFlags			accessMask;
636			VK_IMAGE_LAYOUT_UNDEFINED,			// VkImageLayout			imageLayout;
637		};
638		return syncInfo;
639	}
640
641	Data getData (void) const
642	{
643		return getHostBufferData(m_context, *m_hostBuffer, m_resource.getBuffer().size);
644	}
645
646private:
647	OperationContext&		m_context;
648	Resource&				m_resource;
649	const AccessMode		m_mode;
650	de::MovePtr<Buffer>		m_hostBuffer;
651};
652
653class Support : public OperationSupport
654{
655public:
656	Support (const ResourceDescription& resourceDesc, const AccessMode mode)
657		: m_mode			(mode)
658	{
659		DE_ASSERT(resourceDesc.type == RESOURCE_TYPE_BUFFER);
660		DE_UNREF(resourceDesc);
661	}
662
663	deUint32 getResourceUsageFlags (void) const
664	{
665		return (m_mode == ACCESS_MODE_READ ? VK_BUFFER_USAGE_TRANSFER_SRC_BIT : VK_BUFFER_USAGE_TRANSFER_DST_BIT);
666	}
667
668	VkQueueFlags getQueueFlags (const OperationContext& context) const
669	{
670		DE_UNREF(context);
671		return VK_QUEUE_TRANSFER_BIT;
672	}
673
674	de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
675	{
676		return de::MovePtr<Operation>(new Implementation(context, resource, m_mode));
677	}
678
679private:
680	const AccessMode			m_mode;
681};
682
683} // CopyBuffer ns
684
685namespace CopyBlitImage
686{
687
688class ImplementationBase : public Operation
689{
690public:
691	//! Copy/Blit/Resolve etc. operation
692	virtual void recordCopyCommand (const VkCommandBuffer cmdBuffer) = 0;
693
694	ImplementationBase (OperationContext& context, Resource& resource, const AccessMode mode)
695		: m_context		(context)
696		, m_resource	(resource)
697		, m_mode		(mode)
698		, m_bufferSize	(getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent))
699	{
700		const DeviceInterface&	vk			= m_context.getDeviceInterface();
701		const VkDevice			device		= m_context.getDevice();
702		Allocator&				allocator	= m_context.getAllocator();
703
704		m_hostBuffer = de::MovePtr<Buffer>(new Buffer(
705			vk, device, allocator, makeBufferCreateInfo(m_bufferSize, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT),
706			MemoryRequirement::HostVisible));
707
708		const Allocation& alloc = m_hostBuffer->getAllocation();
709		if (m_mode == ACCESS_MODE_READ)
710			deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(m_bufferSize));
711		else
712			fillPattern(alloc.getHostPtr(), m_bufferSize);
713		flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), m_bufferSize);
714
715		// Staging image
716		m_image = de::MovePtr<Image>(new Image(
717			vk, device, allocator,
718			makeImageCreateInfo(m_resource.getImage().imageType, m_resource.getImage().extent, m_resource.getImage().format, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT),
719			MemoryRequirement::Any));
720	}
721
722	void recordCommands (const VkCommandBuffer cmdBuffer)
723	{
724		const DeviceInterface&		vk					= m_context.getDeviceInterface();
725		const VkBufferImageCopy		bufferCopyRegion	= makeBufferImageCopy(m_resource.getImage().subresourceLayers, m_resource.getImage().extent);
726
727		const VkImageMemoryBarrier stagingImageTransferSrcLayoutBarrier = makeImageMemoryBarrier(
728			VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
729			VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
730			**m_image, m_resource.getImage().subresourceRange);
731
732		// Staging image layout
733		{
734			const VkImageMemoryBarrier layoutBarrier = makeImageMemoryBarrier(
735				(VkAccessFlags)0, VK_ACCESS_TRANSFER_WRITE_BIT,
736				VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
737				**m_image, m_resource.getImage().subresourceRange);
738
739			vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0,
740				0u, DE_NULL, 0u, DE_NULL, 1u, &layoutBarrier);
741		}
742
743		if (m_mode == ACCESS_MODE_READ)
744		{
745			// Resource Image -> Staging image
746			recordCopyCommand(cmdBuffer);
747
748			// Staging image layout
749			vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0,
750				0u, DE_NULL, 0u, DE_NULL, 1u, &stagingImageTransferSrcLayoutBarrier);
751
752			// Image -> Host buffer
753			vk.cmdCopyImageToBuffer(cmdBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, **m_hostBuffer, 1u, &bufferCopyRegion);
754
755			// Insert a barrier so copied data is available to the host
756			const VkBufferMemoryBarrier	barrier = makeBufferMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT, **m_hostBuffer, 0u, m_bufferSize);
757			vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0u, DE_NULL, 1u, &barrier, 0u, DE_NULL);
758		}
759		else
760		{
761			// Host buffer -> Staging image
762			vk.cmdCopyBufferToImage(cmdBuffer, **m_hostBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &bufferCopyRegion);
763
764			// Staging image layout
765			vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0,
766				0u, DE_NULL, 0u, DE_NULL, 1u, &stagingImageTransferSrcLayoutBarrier);
767
768			// Resource image layout
769			{
770				const VkImageMemoryBarrier layoutBarrier = makeImageMemoryBarrier(
771					(VkAccessFlags)0, VK_ACCESS_TRANSFER_WRITE_BIT,
772					VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
773					m_resource.getImage().handle, m_resource.getImage().subresourceRange);
774
775				vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0,
776					0u, DE_NULL, 0u, DE_NULL, 1u, &layoutBarrier);
777			}
778
779			// Staging image -> Resource Image
780			recordCopyCommand(cmdBuffer);
781		}
782	}
783
784	SyncInfo getSyncInfo (void) const
785	{
786		const VkAccessFlags access		= (m_mode == ACCESS_MODE_READ ? VK_ACCESS_TRANSFER_READ_BIT : VK_ACCESS_TRANSFER_WRITE_BIT);
787		const VkImageLayout layout		= (m_mode == ACCESS_MODE_READ ? VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL : VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
788		const SyncInfo		syncInfo	=
789		{
790			VK_PIPELINE_STAGE_TRANSFER_BIT,		// VkPipelineStageFlags		stageMask;
791			access,								// VkAccessFlags			accessMask;
792			layout,								// VkImageLayout			imageLayout;
793		};
794		return syncInfo;
795	}
796
797	Data getData (void) const
798	{
799		return getHostBufferData(m_context, *m_hostBuffer, m_bufferSize);
800	}
801
802protected:
803	OperationContext&		m_context;
804	Resource&				m_resource;
805	const AccessMode		m_mode;
806	const VkDeviceSize		m_bufferSize;
807	de::MovePtr<Buffer>		m_hostBuffer;
808	de::MovePtr<Image>		m_image;
809};
810
811VkOffset3D makeExtentOffset (const Resource& resource)
812{
813	DE_ASSERT(resource.getType() == RESOURCE_TYPE_IMAGE);
814	const VkExtent3D extent = resource.getImage().extent;
815
816	switch (resource.getImage().imageType)
817	{
818		case VK_IMAGE_TYPE_1D:	return makeOffset3D(extent.width, 1, 1);
819		case VK_IMAGE_TYPE_2D:	return makeOffset3D(extent.width, extent.height, 1);
820		case VK_IMAGE_TYPE_3D:	return makeOffset3D(extent.width, extent.height, extent.depth);
821		default:
822			DE_ASSERT(0);
823			return VkOffset3D();
824	}
825}
826
827VkImageBlit makeBlitRegion (const Resource& resource)
828{
829	const VkImageBlit blitRegion =
830	{
831		resource.getImage().subresourceLayers,					// VkImageSubresourceLayers    srcSubresource;
832		{ makeOffset3D(0, 0, 0), makeExtentOffset(resource) },	// VkOffset3D                  srcOffsets[2];
833		resource.getImage().subresourceLayers,					// VkImageSubresourceLayers    dstSubresource;
834		{ makeOffset3D(0, 0, 0), makeExtentOffset(resource) },	// VkOffset3D                  dstOffsets[2];
835	};
836	return blitRegion;
837}
838
839class BlitImplementation : public ImplementationBase
840{
841public:
842	BlitImplementation (OperationContext& context, Resource& resource, const AccessMode mode)
843		: ImplementationBase	(context, resource, mode)
844		, m_blitRegion			(makeBlitRegion(m_resource))
845	{
846		const InstanceInterface&	vki				= m_context.getInstanceInterface();
847		const VkPhysicalDevice		physDevice		= m_context.getPhysicalDevice();
848		const VkFormatProperties	formatProps		= getPhysicalDeviceFormatProperties(vki, physDevice, m_resource.getImage().format);
849		const VkFormatFeatureFlags	requiredFlags	= (VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_BLIT_DST_BIT);
850
851		// SRC and DST blit is required because both images are using the same format.
852		if ((formatProps.optimalTilingFeatures & requiredFlags) != requiredFlags)
853			TCU_THROW(NotSupportedError, "Format doesn't support blits");
854	}
855
856	void recordCopyCommand (const VkCommandBuffer cmdBuffer)
857	{
858		const DeviceInterface&	vk	= m_context.getDeviceInterface();
859
860		if (m_mode == ACCESS_MODE_READ)
861		{
862			// Resource Image -> Staging image
863			vk.cmdBlitImage(cmdBuffer, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, **m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
864				1u, &m_blitRegion, VK_FILTER_NEAREST);
865		}
866		else
867		{
868			// Staging image -> Resource Image
869			vk.cmdBlitImage(cmdBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
870				1u, &m_blitRegion, VK_FILTER_NEAREST);
871		}
872	}
873
874private:
875	const VkImageBlit	m_blitRegion;
876};
877
878VkImageCopy makeImageCopyRegion (const Resource& resource)
879{
880	const VkImageCopy imageCopyRegion =
881	{
882		resource.getImage().subresourceLayers,		// VkImageSubresourceLayers    srcSubresource;
883		makeOffset3D(0, 0, 0),						// VkOffset3D                  srcOffset;
884		resource.getImage().subresourceLayers,		// VkImageSubresourceLayers    dstSubresource;
885		makeOffset3D(0, 0, 0),						// VkOffset3D                  dstOffset;
886		resource.getImage().extent,					// VkExtent3D                  extent;
887	};
888	return imageCopyRegion;
889}
890
891class CopyImplementation : public ImplementationBase
892{
893public:
894	CopyImplementation (OperationContext& context, Resource& resource, const AccessMode mode)
895		: ImplementationBase	(context, resource, mode)
896		, m_imageCopyRegion		(makeImageCopyRegion(m_resource))
897	{
898	}
899
900	void recordCopyCommand (const VkCommandBuffer cmdBuffer)
901	{
902		const DeviceInterface&	vk	= m_context.getDeviceInterface();
903
904		if (m_mode == ACCESS_MODE_READ)
905		{
906			// Resource Image -> Staging image
907			vk.cmdCopyImage(cmdBuffer, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, **m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &m_imageCopyRegion);
908		}
909		else
910		{
911			// Staging image -> Resource Image
912			vk.cmdCopyImage(cmdBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &m_imageCopyRegion);
913		}
914	}
915
916private:
917	const VkImageCopy	m_imageCopyRegion;
918};
919
920enum Type
921{
922	TYPE_COPY,
923	TYPE_BLIT,
924};
925
926class Support : public OperationSupport
927{
928public:
929	Support (const ResourceDescription& resourceDesc, const Type type, const AccessMode mode)
930		: m_type				(type)
931		, m_mode				(mode)
932	{
933		DE_ASSERT(resourceDesc.type == RESOURCE_TYPE_IMAGE);
934
935		const bool isDepthStencil	= isDepthStencilFormat(resourceDesc.imageFormat);
936		m_requiredQueueFlags		= (isDepthStencil || m_type == TYPE_BLIT ? VK_QUEUE_GRAPHICS_BIT : VK_QUEUE_TRANSFER_BIT);
937
938		// Don't blit depth/stencil images.
939		DE_ASSERT(m_type != TYPE_BLIT || !isDepthStencil);
940	}
941
942	deUint32 getResourceUsageFlags (void) const
943	{
944		return (m_mode == ACCESS_MODE_READ ? VK_BUFFER_USAGE_TRANSFER_SRC_BIT : VK_BUFFER_USAGE_TRANSFER_DST_BIT);
945	}
946
947	VkQueueFlags getQueueFlags (const OperationContext& context) const
948	{
949		DE_UNREF(context);
950		return m_requiredQueueFlags;
951	}
952
953	de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
954	{
955		if (m_type == TYPE_COPY)
956			return de::MovePtr<Operation>(new CopyImplementation(context, resource, m_mode));
957		else
958			return de::MovePtr<Operation>(new BlitImplementation(context, resource, m_mode));
959	}
960
961private:
962	const Type			m_type;
963	const AccessMode	m_mode;
964	VkQueueFlags		m_requiredQueueFlags;
965};
966
967} // CopyBlitImage ns
968
969namespace ShaderAccess
970{
971
972enum DispatchCall
973{
974	DISPATCH_CALL_DISPATCH,
975	DISPATCH_CALL_DISPATCH_INDIRECT,
976};
977
978class GraphicsPipeline : public Pipeline
979{
980public:
981	GraphicsPipeline (OperationContext& context, const VkShaderStageFlagBits stage, const std::string& shaderPrefix, const VkDescriptorSetLayout descriptorSetLayout)
982		: m_vertices	(context)
983	{
984		const DeviceInterface&		vk				= context.getDeviceInterface();
985		const VkDevice				device			= context.getDevice();
986		Allocator&					allocator		= context.getAllocator();
987		const VkShaderStageFlags	requiredStages	= getRequiredStages(stage);
988
989		// Color attachment
990
991		m_colorFormat					= VK_FORMAT_R8G8B8A8_UNORM;
992		m_colorImageSubresourceRange	= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);
993		m_colorImageExtent				= makeExtent3D(16u, 16u, 1u);
994		m_colorAttachmentImage			= de::MovePtr<Image>(new Image(vk, device, allocator,
995			makeImageCreateInfo(VK_IMAGE_TYPE_2D, m_colorImageExtent, m_colorFormat, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT),
996			MemoryRequirement::Any));
997
998		// Pipeline
999
1000		m_colorAttachmentView	= makeImageView		(vk, device, **m_colorAttachmentImage, VK_IMAGE_VIEW_TYPE_2D, m_colorFormat, m_colorImageSubresourceRange);
1001		m_renderPass			= makeRenderPass	(vk, device, m_colorFormat);
1002		m_framebuffer			= makeFramebuffer	(vk, device, *m_renderPass, *m_colorAttachmentView, m_colorImageExtent.width, m_colorImageExtent.height, 1u);
1003		m_pipelineLayout		= makePipelineLayout(vk, device, descriptorSetLayout);
1004
1005		GraphicsPipelineBuilder pipelineBuilder;
1006		pipelineBuilder
1007			.setRenderSize					(tcu::IVec2(m_colorImageExtent.width, m_colorImageExtent.height))
1008			.setVertexInputSingleAttribute	(m_vertices.getVertexFormat(), m_vertices.getVertexStride())
1009			.setShader						(vk, device, VK_SHADER_STAGE_VERTEX_BIT,	context.getBinaryCollection().get(shaderPrefix + "vert"), DE_NULL)
1010			.setShader						(vk, device, VK_SHADER_STAGE_FRAGMENT_BIT,	context.getBinaryCollection().get(shaderPrefix + "frag"), DE_NULL);
1011
1012		if (requiredStages & (VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT | VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT))
1013			pipelineBuilder
1014				.setPatchControlPoints	(m_vertices.getNumVertices())
1015				.setShader				(vk, device, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,		context.getBinaryCollection().get(shaderPrefix + "tesc"), DE_NULL)
1016				.setShader				(vk, device, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,	context.getBinaryCollection().get(shaderPrefix + "tese"), DE_NULL);
1017
1018		if (requiredStages & VK_SHADER_STAGE_GEOMETRY_BIT)
1019			pipelineBuilder
1020				.setShader	(vk, device, VK_SHADER_STAGE_GEOMETRY_BIT,	context.getBinaryCollection().get(shaderPrefix + "geom"), DE_NULL);
1021
1022		m_pipeline = pipelineBuilder.build(vk, device, *m_pipelineLayout, *m_renderPass, context.getPipelineCacheData());
1023	}
1024
1025	void recordCommands (OperationContext& context, const VkCommandBuffer cmdBuffer, const VkDescriptorSet descriptorSet)
1026	{
1027		const DeviceInterface&	vk	= context.getDeviceInterface();
1028
1029		// Change color attachment image layout
1030		{
1031			const VkImageMemoryBarrier colorAttachmentLayoutBarrier = makeImageMemoryBarrier(
1032				(VkAccessFlags)0, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
1033				VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
1034				**m_colorAttachmentImage, m_colorImageSubresourceRange);
1035
1036			vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0,
1037				0u, DE_NULL, 0u, DE_NULL, 1u, &colorAttachmentLayoutBarrier);
1038		}
1039
1040		{
1041			const VkRect2D renderArea = {
1042				makeOffset2D(0, 0),
1043				makeExtent2D(m_colorImageExtent.width, m_colorImageExtent.height),
1044			};
1045			const tcu::Vec4 clearColor = tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f);
1046
1047			beginRenderPass(vk, cmdBuffer, *m_renderPass, *m_framebuffer, renderArea, clearColor);
1048		}
1049
1050		vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
1051		vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipelineLayout, 0u, 1u, &descriptorSet, 0u, DE_NULL);
1052		{
1053			const VkDeviceSize	vertexBufferOffset	= 0ull;
1054			const VkBuffer		vertexBuffer		= m_vertices.getVertexBuffer();
1055			vk.cmdBindVertexBuffers(cmdBuffer, 0u, 1u, &vertexBuffer, &vertexBufferOffset);
1056		}
1057
1058		vk.cmdDraw(cmdBuffer, m_vertices.getNumVertices(), 1u, 0u, 0u);
1059		endRenderPass(vk, cmdBuffer);
1060	}
1061
1062private:
1063	const VertexGrid			m_vertices;
1064	VkFormat					m_colorFormat;
1065	de::MovePtr<Image>			m_colorAttachmentImage;
1066	Move<VkImageView>			m_colorAttachmentView;
1067	VkExtent3D					m_colorImageExtent;
1068	VkImageSubresourceRange		m_colorImageSubresourceRange;
1069	Move<VkRenderPass>			m_renderPass;
1070	Move<VkFramebuffer>			m_framebuffer;
1071	Move<VkPipelineLayout>		m_pipelineLayout;
1072	Move<VkPipeline>			m_pipeline;
1073};
1074
1075class ComputePipeline : public Pipeline
1076{
1077public:
1078	ComputePipeline (OperationContext& context, const DispatchCall dispatchCall, const std::string& shaderPrefix, const VkDescriptorSetLayout descriptorSetLayout)
1079		: m_dispatchCall	(dispatchCall)
1080	{
1081		const DeviceInterface&	vk			= context.getDeviceInterface();
1082		const VkDevice			device		= context.getDevice();
1083		Allocator&				allocator	= context.getAllocator();
1084
1085		if (m_dispatchCall == DISPATCH_CALL_DISPATCH_INDIRECT)
1086		{
1087			m_indirectBuffer = de::MovePtr<Buffer>(new Buffer(vk, device, allocator,
1088				makeBufferCreateInfo(sizeof(VkDispatchIndirectCommand), VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT), MemoryRequirement::HostVisible));
1089
1090			const Allocation&					alloc				= m_indirectBuffer->getAllocation();
1091			VkDispatchIndirectCommand* const	pIndirectCommand	= static_cast<VkDispatchIndirectCommand*>(alloc.getHostPtr());
1092
1093			pIndirectCommand->x	= 1u;
1094			pIndirectCommand->y = 1u;
1095			pIndirectCommand->z	= 1u;
1096
1097			flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), sizeof(VkDispatchIndirectCommand));
1098		}
1099
1100		const Unique<VkShaderModule> shaderModule(createShaderModule(vk, device, context.getBinaryCollection().get(shaderPrefix + "comp"), (VkShaderModuleCreateFlags)0));
1101
1102		m_pipelineLayout = makePipelineLayout(vk, device, descriptorSetLayout);
1103		m_pipeline		 = makeComputePipeline(vk, device, *m_pipelineLayout, *shaderModule, DE_NULL, context.getPipelineCacheData());
1104	}
1105
1106	void recordCommands (OperationContext& context, const VkCommandBuffer cmdBuffer, const VkDescriptorSet descriptorSet)
1107	{
1108		const DeviceInterface&	vk	= context.getDeviceInterface();
1109
1110		vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_pipeline);
1111		vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_pipelineLayout, 0u, 1u, &descriptorSet, 0u, DE_NULL);
1112
1113		if (m_dispatchCall == DISPATCH_CALL_DISPATCH_INDIRECT)
1114			vk.cmdDispatchIndirect(cmdBuffer, **m_indirectBuffer, 0u);
1115		else
1116			vk.cmdDispatch(cmdBuffer, 1u, 1u, 1u);
1117	}
1118
1119private:
1120	const DispatchCall			m_dispatchCall;
1121	de::MovePtr<Buffer>			m_indirectBuffer;
1122	Move<VkPipelineLayout>		m_pipelineLayout;
1123	Move<VkPipeline>			m_pipeline;
1124};
1125
1126//! Read/write operation on a UBO/SSBO in graphics/compute pipeline.
1127class BufferImplementation : public Operation
1128{
1129public:
1130	BufferImplementation (OperationContext&				context,
1131						  Resource&						resource,
1132						  const VkShaderStageFlagBits	stage,
1133						  const BufferType				bufferType,
1134						  const std::string&			shaderPrefix,
1135						  const AccessMode				mode,
1136						  const PipelineType			pipelineType,
1137						  const DispatchCall			dispatchCall)
1138		: m_context			(context)
1139		, m_resource		(resource)
1140		, m_stage			(stage)
1141		, m_pipelineStage	(pipelineStageFlagsFromShaderStageFlagBits(m_stage))
1142		, m_bufferType		(bufferType)
1143		, m_mode			(mode)
1144		, m_dispatchCall	(dispatchCall)
1145	{
1146		requireFeaturesForSSBOAccess (m_context, m_stage);
1147
1148		const DeviceInterface&	vk			= m_context.getDeviceInterface();
1149		const VkDevice			device		= m_context.getDevice();
1150		Allocator&				allocator	= m_context.getAllocator();
1151
1152		m_hostBuffer = de::MovePtr<Buffer>(new Buffer(
1153			vk, device, allocator, makeBufferCreateInfo(m_resource.getBuffer().size, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT), MemoryRequirement::HostVisible));
1154
1155		// Init host buffer data
1156		{
1157			const Allocation& alloc = m_hostBuffer->getAllocation();
1158			if (m_mode == ACCESS_MODE_READ)
1159				deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(m_resource.getBuffer().size));
1160			else
1161				fillPattern(alloc.getHostPtr(), m_resource.getBuffer().size);
1162			flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), m_resource.getBuffer().size);
1163		}
1164
1165		// Prepare descriptors
1166		{
1167			const VkDescriptorType	bufferDescriptorType	= (m_bufferType == BUFFER_TYPE_UNIFORM ? VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER : VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
1168
1169			m_descriptorSetLayout = DescriptorSetLayoutBuilder()
1170				.addSingleBinding(bufferDescriptorType, m_stage)
1171				.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, m_stage)
1172				.build(vk, device);
1173
1174			m_descriptorPool = DescriptorPoolBuilder()
1175				.addType(bufferDescriptorType)
1176				.addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
1177				.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
1178
1179			m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
1180
1181			const VkDescriptorBufferInfo  bufferInfo	 = makeDescriptorBufferInfo(m_resource.getBuffer().handle, m_resource.getBuffer().offset, m_resource.getBuffer().size);
1182			const VkDescriptorBufferInfo  hostBufferInfo = makeDescriptorBufferInfo(**m_hostBuffer, 0u, m_resource.getBuffer().size);
1183
1184			if (m_mode == ACCESS_MODE_READ)
1185			{
1186				DescriptorSetUpdateBuilder()
1187					.writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), bufferDescriptorType, &bufferInfo)
1188					.writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &hostBufferInfo)
1189					.update(vk, device);
1190			}
1191			else
1192			{
1193				DescriptorSetUpdateBuilder()
1194					.writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &hostBufferInfo)
1195					.writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &bufferInfo)
1196					.update(vk, device);
1197			}
1198		}
1199
1200		// Create pipeline
1201		m_pipeline = (pipelineType == PIPELINE_TYPE_GRAPHICS ? de::MovePtr<Pipeline>(new GraphicsPipeline(context, stage, shaderPrefix, *m_descriptorSetLayout))
1202															 : de::MovePtr<Pipeline>(new ComputePipeline(context, m_dispatchCall, shaderPrefix, *m_descriptorSetLayout)));
1203	}
1204
1205	void recordCommands (const VkCommandBuffer cmdBuffer)
1206	{
1207		m_pipeline->recordCommands(m_context, cmdBuffer, *m_descriptorSet);
1208
1209		// Post draw/dispatch commands
1210
1211		if (m_mode == ACCESS_MODE_READ)
1212		{
1213			const DeviceInterface&	vk	= m_context.getDeviceInterface();
1214
1215			// Insert a barrier so data written by the shader is available to the host
1216			const VkBufferMemoryBarrier	barrier = makeBufferMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT, **m_hostBuffer, 0u, m_resource.getBuffer().size);
1217			vk.cmdPipelineBarrier(cmdBuffer, m_pipelineStage, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0u, DE_NULL, 1u, &barrier, 0u, DE_NULL);
1218		}
1219	}
1220
1221	SyncInfo getSyncInfo (void) const
1222	{
1223		const VkAccessFlags	accessFlags = (m_mode == ACCESS_MODE_READ ? (m_bufferType == BUFFER_TYPE_UNIFORM ? VK_ACCESS_UNIFORM_READ_BIT
1224																											 : VK_ACCESS_SHADER_READ_BIT)
1225																	  : VK_ACCESS_SHADER_WRITE_BIT);
1226		const SyncInfo		syncInfo	=
1227		{
1228			m_pipelineStage,				// VkPipelineStageFlags		stageMask;
1229			accessFlags,					// VkAccessFlags			accessMask;
1230			VK_IMAGE_LAYOUT_UNDEFINED,		// VkImageLayout			imageLayout;
1231		};
1232		return syncInfo;
1233	}
1234
1235	Data getData (void) const
1236	{
1237		return getHostBufferData(m_context, *m_hostBuffer, m_resource.getBuffer().size);
1238	}
1239
1240private:
1241	OperationContext&			m_context;
1242	Resource&					m_resource;
1243	const VkShaderStageFlagBits	m_stage;
1244	const VkPipelineStageFlags	m_pipelineStage;
1245	const BufferType			m_bufferType;
1246	const AccessMode			m_mode;
1247	const DispatchCall			m_dispatchCall;
1248	de::MovePtr<Buffer>			m_hostBuffer;
1249	Move<VkDescriptorPool>		m_descriptorPool;
1250	Move<VkDescriptorSetLayout>	m_descriptorSetLayout;
1251	Move<VkDescriptorSet>		m_descriptorSet;
1252	de::MovePtr<Pipeline>		m_pipeline;
1253};
1254
1255class ImageImplementation : public Operation
1256{
1257public:
1258	ImageImplementation (OperationContext&				context,
1259						 Resource&						resource,
1260						 const VkShaderStageFlagBits	stage,
1261						 const std::string&				shaderPrefix,
1262						 const AccessMode				mode,
1263						 const PipelineType				pipelineType,
1264						 const DispatchCall				dispatchCall)
1265		: m_context				(context)
1266		, m_resource			(resource)
1267		, m_stage				(stage)
1268		, m_pipelineStage		(pipelineStageFlagsFromShaderStageFlagBits(m_stage))
1269		, m_mode				(mode)
1270		, m_dispatchCall		(dispatchCall)
1271		, m_hostBufferSizeBytes	(getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent))
1272	{
1273		const DeviceInterface&		vk			= m_context.getDeviceInterface();
1274		const InstanceInterface&	vki			= m_context.getInstanceInterface();
1275		const VkDevice				device		= m_context.getDevice();
1276		const VkPhysicalDevice		physDevice	= m_context.getPhysicalDevice();
1277		Allocator&					allocator	= m_context.getAllocator();
1278
1279		// Image stores are always required, in either access mode.
1280		requireFeaturesForSSBOAccess(m_context, m_stage);
1281
1282		// Some storage image formats require additional capability.
1283		if (isStorageImageExtendedFormat(m_resource.getImage().format))
1284			requireFeatures(vki, physDevice, FEATURE_SHADER_STORAGE_IMAGE_EXTENDED_FORMATS);
1285
1286		m_hostBuffer = de::MovePtr<Buffer>(new Buffer(
1287			vk, device, allocator, makeBufferCreateInfo(m_hostBufferSizeBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT),
1288			MemoryRequirement::HostVisible));
1289
1290		// Init host buffer data
1291		{
1292			const Allocation& alloc = m_hostBuffer->getAllocation();
1293			if (m_mode == ACCESS_MODE_READ)
1294				deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(m_hostBufferSizeBytes));
1295			else
1296				fillPattern(alloc.getHostPtr(), m_hostBufferSizeBytes);
1297			flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), m_hostBufferSizeBytes);
1298		}
1299
1300		// Image resources
1301		{
1302			m_image = de::MovePtr<Image>(new Image(vk, device, allocator,
1303				makeImageCreateInfo(m_resource.getImage().imageType, m_resource.getImage().extent, m_resource.getImage().format,
1304									VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_STORAGE_BIT),
1305				MemoryRequirement::Any));
1306
1307			if (m_mode == ACCESS_MODE_READ)
1308			{
1309				m_srcImage = &m_resource.getImage().handle;
1310				m_dstImage = &(**m_image);
1311			}
1312			else
1313			{
1314				m_srcImage = &(**m_image);
1315				m_dstImage = &m_resource.getImage().handle;
1316			}
1317
1318			const VkImageViewType viewType = getImageViewType(m_resource.getImage().imageType);
1319
1320			m_srcImageView	= makeImageView(vk, device, *m_srcImage, viewType, m_resource.getImage().format, m_resource.getImage().subresourceRange);
1321			m_dstImageView	= makeImageView(vk, device, *m_dstImage, viewType, m_resource.getImage().format, m_resource.getImage().subresourceRange);
1322		}
1323
1324		// Prepare descriptors
1325		{
1326			m_descriptorSetLayout = DescriptorSetLayoutBuilder()
1327				.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, m_stage)
1328				.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, m_stage)
1329				.build(vk, device);
1330
1331			m_descriptorPool = DescriptorPoolBuilder()
1332				.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
1333				.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
1334				.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
1335
1336			m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
1337
1338			const VkDescriptorImageInfo srcImageInfo = makeDescriptorImageInfo(DE_NULL, *m_srcImageView, VK_IMAGE_LAYOUT_GENERAL);
1339			const VkDescriptorImageInfo dstImageInfo = makeDescriptorImageInfo(DE_NULL, *m_dstImageView, VK_IMAGE_LAYOUT_GENERAL);
1340
1341			DescriptorSetUpdateBuilder()
1342				.writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &srcImageInfo)
1343				.writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &dstImageInfo)
1344				.update(vk, device);
1345		}
1346
1347		// Create pipeline
1348		m_pipeline = (pipelineType == PIPELINE_TYPE_GRAPHICS ? de::MovePtr<Pipeline>(new GraphicsPipeline(context, stage, shaderPrefix, *m_descriptorSetLayout))
1349															 : de::MovePtr<Pipeline>(new ComputePipeline(context, m_dispatchCall, shaderPrefix, *m_descriptorSetLayout)));
1350	}
1351
1352	void recordCommands (const VkCommandBuffer cmdBuffer)
1353	{
1354		const DeviceInterface&	vk					= m_context.getDeviceInterface();
1355		const VkBufferImageCopy	bufferCopyRegion	= makeBufferImageCopy(m_resource.getImage().subresourceLayers, m_resource.getImage().extent);
1356
1357		// Destination image layout
1358		{
1359			const VkImageMemoryBarrier barrier = makeImageMemoryBarrier(
1360				(VkAccessFlags)0, VK_ACCESS_SHADER_WRITE_BIT,
1361				VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
1362				*m_dstImage, m_resource.getImage().subresourceRange);
1363
1364			vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, m_pipelineStage, (VkDependencyFlags)0,
1365				0u, DE_NULL, 0u, DE_NULL, 1u, &barrier);
1366		}
1367
1368		// In write mode, source image must be filled with data.
1369		if (m_mode == ACCESS_MODE_WRITE)
1370		{
1371			// Layout for transfer
1372			{
1373				const VkImageMemoryBarrier barrier = makeImageMemoryBarrier(
1374					(VkAccessFlags)0, VK_ACCESS_TRANSFER_WRITE_BIT,
1375					VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
1376					*m_srcImage, m_resource.getImage().subresourceRange);
1377
1378				vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0,
1379					0u, DE_NULL, 0u, DE_NULL, 1u, &barrier);
1380			}
1381
1382			// Host buffer -> Src image
1383			vk.cmdCopyBufferToImage(cmdBuffer, **m_hostBuffer, *m_srcImage, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &bufferCopyRegion);
1384
1385			// Layout for shader reading
1386			{
1387				const VkImageMemoryBarrier barrier = makeImageMemoryBarrier(
1388					VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT,
1389					VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL,
1390					*m_srcImage, m_resource.getImage().subresourceRange);
1391
1392				vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, m_pipelineStage, (VkDependencyFlags)0,
1393					0u, DE_NULL, 0u, DE_NULL, 1u, &barrier);
1394			}
1395		}
1396
1397		// Execute shaders
1398
1399		m_pipeline->recordCommands(m_context, cmdBuffer, *m_descriptorSet);
1400
1401		// Post draw/dispatch commands
1402
1403		if (m_mode == ACCESS_MODE_READ)
1404		{
1405			// Layout for transfer
1406			{
1407				const VkImageMemoryBarrier barrier = makeImageMemoryBarrier(
1408					VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1409					VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1410					*m_dstImage, m_resource.getImage().subresourceRange);
1411
1412				vk.cmdPipelineBarrier(cmdBuffer, m_pipelineStage, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0,
1413					0u, DE_NULL, 0u, DE_NULL, 1u, &barrier);
1414			}
1415
1416			// Dst image -> Host buffer
1417			vk.cmdCopyImageToBuffer(cmdBuffer, *m_dstImage, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, **m_hostBuffer, 1u, &bufferCopyRegion);
1418
1419			// Insert a barrier so data written by the shader is available to the host
1420			{
1421				const VkBufferMemoryBarrier	barrier = makeBufferMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT, **m_hostBuffer, 0u, m_hostBufferSizeBytes);
1422				vk.cmdPipelineBarrier(cmdBuffer, m_pipelineStage, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0u, DE_NULL, 1u, &barrier, 0u, DE_NULL);
1423			}
1424		}
1425	}
1426
1427	SyncInfo getSyncInfo (void) const
1428	{
1429		const VkAccessFlags	accessFlags = (m_mode == ACCESS_MODE_READ ? VK_ACCESS_SHADER_READ_BIT : VK_ACCESS_SHADER_WRITE_BIT);
1430		const SyncInfo		syncInfo	=
1431		{
1432			m_pipelineStage,			// VkPipelineStageFlags		stageMask;
1433			accessFlags,				// VkAccessFlags			accessMask;
1434			VK_IMAGE_LAYOUT_GENERAL,	// VkImageLayout			imageLayout;
1435		};
1436		return syncInfo;
1437	}
1438
1439	Data getData (void) const
1440	{
1441		return getHostBufferData(m_context, *m_hostBuffer, m_hostBufferSizeBytes);
1442	}
1443
1444private:
1445	OperationContext&			m_context;
1446	Resource&					m_resource;
1447	const VkShaderStageFlagBits	m_stage;
1448	const VkPipelineStageFlags	m_pipelineStage;
1449	const AccessMode			m_mode;
1450	const DispatchCall			m_dispatchCall;
1451	const VkDeviceSize			m_hostBufferSizeBytes;
1452	de::MovePtr<Buffer>			m_hostBuffer;
1453	de::MovePtr<Image>			m_image;			//!< Additional image used as src or dst depending on operation mode.
1454	const VkImage*				m_srcImage;
1455	const VkImage*				m_dstImage;
1456	Move<VkImageView>			m_srcImageView;
1457	Move<VkImageView>			m_dstImageView;
1458	Move<VkDescriptorPool>		m_descriptorPool;
1459	Move<VkDescriptorSetLayout>	m_descriptorSetLayout;
1460	Move<VkDescriptorSet>		m_descriptorSet;
1461	de::MovePtr<Pipeline>		m_pipeline;
1462};
1463
1464//! Create generic passthrough shaders with bits of custom code inserted in a specific shader stage.
1465void initPassthroughPrograms (SourceCollections&			programCollection,
1466							  const std::string&			shaderPrefix,
1467							  const std::string&			declCode,
1468							  const std::string&			mainCode,
1469							  const VkShaderStageFlagBits	stage)
1470{
1471	const VkShaderStageFlags	requiredStages	= getRequiredStages(stage);
1472
1473	if (requiredStages & VK_SHADER_STAGE_VERTEX_BIT)
1474	{
1475		std::ostringstream src;
1476		src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
1477			<< "\n"
1478			<< "layout(location = 0) in vec4 v_in_position;\n"
1479			<< "\n"
1480			<< "out " << s_perVertexBlock << ";\n"
1481			<< "\n"
1482			<< (stage & VK_SHADER_STAGE_VERTEX_BIT ? declCode + "\n" : "")
1483			<< "void main (void)\n"
1484			<< "{\n"
1485			<< "    gl_Position = v_in_position;\n"
1486			<< (stage & VK_SHADER_STAGE_VERTEX_BIT ? mainCode : "")
1487			<< "}\n";
1488
1489		programCollection.glslSources.add(shaderPrefix + "vert") << glu::VertexSource(src.str());
1490	}
1491
1492	if (requiredStages & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT)
1493	{
1494		std::ostringstream src;
1495		src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
1496			<< "\n"
1497			<< "layout(vertices = 3) out;\n"
1498			<< "\n"
1499			<< "in " << s_perVertexBlock << " gl_in[gl_MaxPatchVertices];\n"
1500			<< "\n"
1501			<< "out " << s_perVertexBlock << " gl_out[];\n"
1502			<< "\n"
1503			<< (stage & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT ? declCode + "\n" : "")
1504			<< "void main (void)\n"
1505			<< "{\n"
1506			<< "    gl_TessLevelInner[0] = 1.0;\n"
1507			<< "    gl_TessLevelInner[1] = 1.0;\n"
1508			<< "\n"
1509			<< "    gl_TessLevelOuter[0] = 1.0;\n"
1510			<< "    gl_TessLevelOuter[1] = 1.0;\n"
1511			<< "    gl_TessLevelOuter[2] = 1.0;\n"
1512			<< "    gl_TessLevelOuter[3] = 1.0;\n"
1513			<< "\n"
1514			<< "    gl_out[gl_InvocationID].gl_Position = gl_in[gl_InvocationID].gl_Position;\n"
1515			<< (stage & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT ? "\n" + mainCode : "")
1516			<< "}\n";
1517
1518		programCollection.glslSources.add(shaderPrefix + "tesc") << glu::TessellationControlSource(src.str());
1519	}
1520
1521	if (requiredStages & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)
1522	{
1523		std::ostringstream src;
1524		src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
1525			<< "\n"
1526			<< "layout(triangles, equal_spacing, ccw) in;\n"
1527			<< "\n"
1528			<< "in " << s_perVertexBlock << " gl_in[gl_MaxPatchVertices];\n"
1529			<< "\n"
1530			<< "out " << s_perVertexBlock << ";\n"
1531			<< "\n"
1532			<< (stage & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT ? declCode + "\n" : "")
1533			<< "void main (void)\n"
1534			<< "{\n"
1535			<< "    vec3 px = gl_TessCoord.x * gl_in[0].gl_Position.xyz;\n"
1536			<< "    vec3 py = gl_TessCoord.y * gl_in[1].gl_Position.xyz;\n"
1537			<< "    vec3 pz = gl_TessCoord.z * gl_in[2].gl_Position.xyz;\n"
1538			<< "    gl_Position = vec4(px + py + pz, 1.0);\n"
1539			<< (stage & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT ? mainCode : "")
1540			<< "}\n";
1541
1542		programCollection.glslSources.add(shaderPrefix + "tese") << glu::TessellationEvaluationSource(src.str());
1543	}
1544
1545	if (requiredStages & VK_SHADER_STAGE_GEOMETRY_BIT)
1546	{
1547		std::ostringstream src;
1548		src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
1549			<< "\n"
1550			<< "layout(triangles) in;\n"
1551			<< "layout(triangle_strip, max_vertices = 3) out;\n"
1552			<< "\n"
1553			<< "in " << s_perVertexBlock << " gl_in[];\n"
1554			<< "\n"
1555			<< "out " << s_perVertexBlock << ";\n"
1556			<< "\n"
1557			<< (stage & VK_SHADER_STAGE_GEOMETRY_BIT ? declCode + "\n" : "")
1558			<< "void main (void)\n"
1559			<< "{\n"
1560			<< "    gl_Position = gl_in[0].gl_Position;\n"
1561			<< "    EmitVertex();\n"
1562			<< "\n"
1563			<< "    gl_Position = gl_in[1].gl_Position;\n"
1564			<< "    EmitVertex();\n"
1565			<< "\n"
1566			<< "    gl_Position = gl_in[2].gl_Position;\n"
1567			<< "    EmitVertex();\n"
1568			<< (stage & VK_SHADER_STAGE_GEOMETRY_BIT ? "\n" + mainCode : "")
1569			<< "}\n";
1570
1571		programCollection.glslSources.add(shaderPrefix + "geom") << glu::GeometrySource(src.str());
1572	}
1573
1574	if (requiredStages & VK_SHADER_STAGE_FRAGMENT_BIT)
1575	{
1576		std::ostringstream src;
1577		src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
1578			<< "\n"
1579			<< "layout(location = 0) out vec4 o_color;\n"
1580			<< "\n"
1581			<< (stage & VK_SHADER_STAGE_FRAGMENT_BIT ? declCode + "\n" : "")
1582			<< "void main (void)\n"
1583			<< "{\n"
1584			<< "    o_color = vec4(1.0);\n"
1585			<< (stage & VK_SHADER_STAGE_FRAGMENT_BIT ? "\n" + mainCode : "")
1586			<< "}\n";
1587
1588		programCollection.glslSources.add(shaderPrefix + "frag") << glu::FragmentSource(src.str());
1589	}
1590
1591	if (requiredStages & VK_SHADER_STAGE_COMPUTE_BIT)
1592	{
1593		std::ostringstream src;
1594		src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
1595			<< "\n"
1596			<< "layout(local_size_x = 1) in;\n"
1597			<< "\n"
1598			<< (stage & VK_SHADER_STAGE_COMPUTE_BIT ? declCode + "\n" : "")
1599			<< "void main (void)\n"
1600			<< "{\n"
1601			<< (stage & VK_SHADER_STAGE_COMPUTE_BIT ? mainCode : "")
1602			<< "}\n";
1603
1604		programCollection.glslSources.add(shaderPrefix + "comp") << glu::ComputeSource(src.str());
1605	}
1606}
1607
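//! Support for a shader-based buffer access operation: the resource buffer is read as a UBO/SSBO or written as an SSBO in the selected shader stage.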
1608class BufferSupport : public OperationSupport
1609{
1610public:
1611	BufferSupport (const ResourceDescription&	resourceDesc,
1612				   const BufferType				bufferType,
1613				   const AccessMode				mode,
1614				   const VkShaderStageFlagBits	stage,
1615				   const DispatchCall			dispatchCall = DISPATCH_CALL_DISPATCH)
1616		: m_resourceDesc	(resourceDesc)
1617		, m_bufferType		(bufferType)
1618		, m_mode			(mode)
1619		, m_stage			(stage)
1620		, m_shaderPrefix	(std::string(m_mode == ACCESS_MODE_READ ? "read_" : "write_") + (m_bufferType == BUFFER_TYPE_UNIFORM ? "ubo_" : "ssbo_"))
1621		, m_dispatchCall	(dispatchCall)
1622	{
1623		DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_BUFFER);
1624		DE_ASSERT(m_bufferType == BUFFER_TYPE_UNIFORM || m_bufferType == BUFFER_TYPE_STORAGE);
1625		DE_ASSERT(m_mode == ACCESS_MODE_READ || m_mode == ACCESS_MODE_WRITE);
1626		DE_ASSERT(m_mode == ACCESS_MODE_READ || m_bufferType == BUFFER_TYPE_STORAGE);
1627		DE_ASSERT(m_bufferType != BUFFER_TYPE_UNIFORM || m_resourceDesc.size.x() <= MAX_UBO_RANGE);
1628		DE_ASSERT(m_dispatchCall == DISPATCH_CALL_DISPATCH || m_dispatchCall == DISPATCH_CALL_DISPATCH_INDIRECT);
1629
1630		assertValidShaderStage(m_stage);
1631	}
1632
1633	void initPrograms (SourceCollections& programCollection) const
1634	{
1635		DE_ASSERT((m_resourceDesc.size.x() % sizeof(tcu::UVec4)) == 0);
1636
1637		const std::string	bufferTypeStr	= (m_bufferType == BUFFER_TYPE_UNIFORM ? "uniform" : "buffer");
1638		const int			numVecElements	= static_cast<int>(m_resourceDesc.size.x() / sizeof(tcu::UVec4));  // uvec4 array elements have a 16-byte stride under std140
1639
1640		std::ostringstream declSrc;
1641		declSrc << "layout(set = 0, binding = 0, std140) readonly " << bufferTypeStr << " Input {\n"
1642				<< "    uvec4 data[" << numVecElements << "];\n"
1643				<< "} b_in;\n"
1644				<< "\n"
1645				<< "layout(set = 0, binding = 1, std140) writeonly buffer Output {\n"
1646				<< "    uvec4 data[" << numVecElements << "];\n"
1647				<< "} b_out;\n";
1648
1649		std::ostringstream copySrc;
1650		copySrc << "    for (int i = 0; i < " << numVecElements << "; ++i) {\n"
1651				<< "        b_out.data[i] = b_in.data[i];\n"
1652				<< "    }\n";
1653
1654		initPassthroughPrograms(programCollection, m_shaderPrefix, declSrc.str(), copySrc.str(), m_stage);
1655	}
1656
1657	deUint32 getResourceUsageFlags (void) const
1658	{
1659		return (m_bufferType == BUFFER_TYPE_UNIFORM ? VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT : VK_BUFFER_USAGE_STORAGE_BUFFER_BIT);
1660	}
1661
1662	VkQueueFlags getQueueFlags (const OperationContext& context) const
1663	{
1664		DE_UNREF(context);
1665		return (m_stage == VK_SHADER_STAGE_COMPUTE_BIT ? VK_QUEUE_COMPUTE_BIT : VK_QUEUE_GRAPHICS_BIT);
1666	}
1667
1668	de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
1669	{
1670		if (m_stage & VK_SHADER_STAGE_COMPUTE_BIT)
1671			return de::MovePtr<Operation>(new BufferImplementation(context, resource, m_stage, m_bufferType, m_shaderPrefix, m_mode, PIPELINE_TYPE_COMPUTE, m_dispatchCall));
1672		else
1673			return de::MovePtr<Operation>(new BufferImplementation(context, resource, m_stage, m_bufferType, m_shaderPrefix, m_mode, PIPELINE_TYPE_GRAPHICS, m_dispatchCall));
1674	}
1675
1676private:
1677	const ResourceDescription	m_resourceDesc;
1678	const BufferType			m_bufferType;
1679	const AccessMode			m_mode;
1680	const VkShaderStageFlagBits	m_stage;
1681	const std::string			m_shaderPrefix;
1682	const DispatchCall			m_dispatchCall;
1683};
1684
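//! Support for a shader-based storage image access operation: the resource image is read or written with imageLoad/imageStore in the selected shader stage.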
1685class ImageSupport : public OperationSupport
1686{
1687public:
1688	ImageSupport (const ResourceDescription&	resourceDesc,
1689				  const AccessMode				mode,
1690				  const VkShaderStageFlagBits	stage,
1691				  const DispatchCall			dispatchCall = DISPATCH_CALL_DISPATCH)
1692		: m_resourceDesc	(resourceDesc)
1693		, m_mode			(mode)
1694		, m_stage			(stage)
1695		, m_shaderPrefix	(m_mode == ACCESS_MODE_READ ? "read_image_" : "write_image_")
1696		, m_dispatchCall	(dispatchCall)
1697	{
1698		DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_IMAGE);
1699		DE_ASSERT(m_mode == ACCESS_MODE_READ || m_mode == ACCESS_MODE_WRITE);
1700		DE_ASSERT(m_dispatchCall == DISPATCH_CALL_DISPATCH || m_dispatchCall == DISPATCH_CALL_DISPATCH_INDIRECT);
1701
1702		assertValidShaderStage(m_stage);
1703	}
1704
1705	void initPrograms (SourceCollections& programCollection) const
1706	{
1707		const std::string	imageFormat	= getShaderImageFormatQualifier(m_resourceDesc.imageFormat);
1708		const std::string	imageType	= getShaderImageType(m_resourceDesc.imageFormat, m_resourceDesc.imageType);
1709
1710		std::ostringstream declSrc;
1711		declSrc << "layout(set = 0, binding = 0, " << imageFormat << ") readonly  uniform " << imageType << " srcImg;\n"
1712				<< "layout(set = 0, binding = 1, " << imageFormat << ") writeonly uniform " << imageType << " dstImg;\n";
1713
1714		std::ostringstream mainSrc;
1715		if (m_resourceDesc.imageType == VK_IMAGE_TYPE_1D)
1716			mainSrc << "    for (int x = 0; x < " << m_resourceDesc.size.x() << "; ++x)\n"
1717					<< "        imageStore(dstImg, x, imageLoad(srcImg, x));\n";
1718		else if (m_resourceDesc.imageType == VK_IMAGE_TYPE_2D)
1719			mainSrc << "    for (int y = 0; y < " << m_resourceDesc.size.y() << "; ++y)\n"
1720					<< "    for (int x = 0; x < " << m_resourceDesc.size.x() << "; ++x)\n"
1721					<< "        imageStore(dstImg, ivec2(x, y), imageLoad(srcImg, ivec2(x, y)));\n";
1722		else if (m_resourceDesc.imageType == VK_IMAGE_TYPE_3D)
1723			mainSrc << "    for (int z = 0; z < " << m_resourceDesc.size.z() << "; ++z)\n"
1724					<< "    for (int y = 0; y < " << m_resourceDesc.size.y() << "; ++y)\n"
1725					<< "    for (int x = 0; x < " << m_resourceDesc.size.x() << "; ++x)\n"
1726					<< "        imageStore(dstImg, ivec3(x, y, z), imageLoad(srcImg, ivec3(x, y, z)));\n";
1727		else
1728			DE_ASSERT(0);
1729
1730		initPassthroughPrograms(programCollection, m_shaderPrefix, declSrc.str(), mainSrc.str(), m_stage);
1731	}
1732
1733	deUint32 getResourceUsageFlags (void) const
1734	{
1735		return VK_IMAGE_USAGE_STORAGE_BIT;
1736	}
1737
1738	VkQueueFlags getQueueFlags (const OperationContext& context) const
1739	{
1740		DE_UNREF(context);
1741		return (m_stage == VK_SHADER_STAGE_COMPUTE_BIT ? VK_QUEUE_COMPUTE_BIT : VK_QUEUE_GRAPHICS_BIT);
1742	}
1743
1744	de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
1745	{
1746		if (m_stage & VK_SHADER_STAGE_COMPUTE_BIT)
1747			return de::MovePtr<Operation>(new ImageImplementation(context, resource, m_stage, m_shaderPrefix, m_mode, PIPELINE_TYPE_COMPUTE, m_dispatchCall));
1748		else
1749			return de::MovePtr<Operation>(new ImageImplementation(context, resource, m_stage, m_shaderPrefix, m_mode, PIPELINE_TYPE_GRAPHICS, m_dispatchCall));
1750	}
1751
1752private:
1753	const ResourceDescription	m_resourceDesc;
1754	const AccessMode			m_mode;
1755	const VkShaderStageFlagBits	m_stage;
1756	const std::string			m_shaderPrefix;
1757	const DispatchCall			m_dispatchCall;
1758};
1759
1760} // ShaderAccess ns
1761
1762namespace CopyBufferToImage
1763{
1764
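//! Writes to the image resource with vkCmdCopyBufferToImage, using a host-visible staging buffer filled with a test pattern.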
1765class WriteImplementation : public Operation
1766{
1767public:
1768	WriteImplementation (OperationContext& context, Resource& resource)
1769		: m_context		(context)
1770		, m_resource	(resource)
1771		, m_bufferSize	(getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent))
1772	{
1773		DE_ASSERT(m_resource.getType() == RESOURCE_TYPE_IMAGE);
1774
1775		const DeviceInterface&	vk			= m_context.getDeviceInterface();
1776		const VkDevice			device		= m_context.getDevice();
1777		Allocator&				allocator	= m_context.getAllocator();
1778
1779		m_hostBuffer = de::MovePtr<Buffer>(new Buffer(
1780			vk, device, allocator, makeBufferCreateInfo(m_bufferSize, VK_BUFFER_USAGE_TRANSFER_SRC_BIT), MemoryRequirement::HostVisible));
1781
1782		const Allocation& alloc = m_hostBuffer->getAllocation();
1783		fillPattern(alloc.getHostPtr(), m_bufferSize);
1784		flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), m_bufferSize);
1785	}
1786
1787	void recordCommands (const VkCommandBuffer cmdBuffer)
1788	{
1789		const DeviceInterface&	vk			= m_context.getDeviceInterface();
1790		const VkBufferImageCopy	copyRegion	= makeBufferImageCopy(m_resource.getImage().subresourceLayers, m_resource.getImage().extent);
1791
1792		const VkImageMemoryBarrier layoutBarrier = makeImageMemoryBarrier(
1793			(VkAccessFlags)0, VK_ACCESS_TRANSFER_WRITE_BIT,
1794			VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
1795			m_resource.getImage().handle, m_resource.getImage().subresourceRange);
1796		vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0u, DE_NULL, 0u, DE_NULL, 1u, &layoutBarrier);
1797
1798		vk.cmdCopyBufferToImage(cmdBuffer, **m_hostBuffer, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);
1799	}
1800
1801	SyncInfo getSyncInfo (void) const
1802	{
1803		const SyncInfo syncInfo =
1804		{
1805			VK_PIPELINE_STAGE_TRANSFER_BIT,			// VkPipelineStageFlags		stageMask;
1806			VK_ACCESS_TRANSFER_WRITE_BIT,			// VkAccessFlags			accessMask;
1807			VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,	// VkImageLayout			imageLayout;
1808		};
1809		return syncInfo;
1810	}
1811
1812	Data getData (void) const
1813	{
1814		return getHostBufferData(m_context, *m_hostBuffer, m_bufferSize);
1815	}
1816
1817private:
1818	OperationContext&		m_context;
1819	Resource&				m_resource;
1820	de::MovePtr<Buffer>		m_hostBuffer;
1821	const VkDeviceSize		m_bufferSize;
1822};
1823
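//! Reads the buffer resource with vkCmdCopyBufferToImage: the buffer is copied into a temporary image, which is then copied back to a host-visible buffer for verification.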
1824class ReadImplementation : public Operation
1825{
1826public:
1827	ReadImplementation (OperationContext& context, Resource& resource)
1828		: m_context				(context)
1829		, m_resource			(resource)
1830		, m_subresourceRange	(makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u))
1831		, m_subresourceLayers	(makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u))
1832	{
1833		DE_ASSERT(m_resource.getType() == RESOURCE_TYPE_BUFFER);
1834
1835		const DeviceInterface&	vk			= m_context.getDeviceInterface();
1836		const VkDevice			device		= m_context.getDevice();
1837		Allocator&				allocator	= m_context.getAllocator();
1838		const VkFormat			format		= VK_FORMAT_R8G8B8A8_UNORM;
1839		const deUint32			pixelSize	= tcu::getPixelSize(mapVkFormat(format));
1840
1841		DE_ASSERT((m_resource.getBuffer().size % pixelSize) == 0);
1842		m_imageExtent = get2DImageExtentWithSize(m_resource.getBuffer().size, pixelSize);  // there may be some unused space at the end
1843
1844		// Copy destination image.
1845		m_image = de::MovePtr<Image>(new Image(
1846			vk, device, allocator, makeImageCreateInfo(VK_IMAGE_TYPE_2D, m_imageExtent, format, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT), MemoryRequirement::Any));
1847
1848		// Image data will be copied here, so it can be read on the host.
1849		m_hostBuffer = de::MovePtr<Buffer>(new Buffer(
1850			vk, device, allocator, makeBufferCreateInfo(m_resource.getBuffer().size, VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible));
1851	}
1852
1853	void recordCommands (const VkCommandBuffer cmdBuffer)
1854	{
1855		const DeviceInterface&	vk			= m_context.getDeviceInterface();
1856		const VkBufferImageCopy	copyRegion	= makeBufferImageCopy(m_subresourceLayers, m_imageExtent);
1857
1858		// Resource -> Image
1859		{
1860			const VkImageMemoryBarrier layoutBarrier = makeImageMemoryBarrier(
1861				(VkAccessFlags)0, VK_ACCESS_TRANSFER_WRITE_BIT,
1862				VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
1863				**m_image, m_subresourceRange);
1864			vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0u, DE_NULL, 0u, DE_NULL, 1u, &layoutBarrier);
1865
1866			vk.cmdCopyBufferToImage(cmdBuffer, m_resource.getBuffer().handle, **m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);
1867		}
1868		// Image -> Host buffer
1869		{
1870			const VkImageMemoryBarrier layoutBarrier = makeImageMemoryBarrier(
1871				VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1872				VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1873				**m_image, m_subresourceRange);
1874			vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0u, DE_NULL, 0u, DE_NULL, 1u, &layoutBarrier);
1875
1876			vk.cmdCopyImageToBuffer(cmdBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, **m_hostBuffer, 1u, &copyRegion);
1877
1878			const VkBufferMemoryBarrier	barrier = makeBufferMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT, **m_hostBuffer, 0u, m_resource.getBuffer().size);
1879			vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0u, DE_NULL, 1u, &barrier, 0u, DE_NULL);
1880		}
1881	}
1882
1883	SyncInfo getSyncInfo (void) const
1884	{
1885		const SyncInfo syncInfo =
1886		{
1887			VK_PIPELINE_STAGE_TRANSFER_BIT,		// VkPipelineStageFlags		stageMask;
1888			VK_ACCESS_TRANSFER_READ_BIT,		// VkAccessFlags			accessMask;
1889			VK_IMAGE_LAYOUT_UNDEFINED,			// VkImageLayout			imageLayout;
1890		};
1891		return syncInfo;
1892	}
1893
1894	Data getData (void) const
1895	{
1896		return getHostBufferData(m_context, *m_hostBuffer, m_resource.getBuffer().size);
1897	}
1898
1899private:
1900	OperationContext&				m_context;
1901	Resource&						m_resource;
1902	const VkImageSubresourceRange	m_subresourceRange;
1903	const VkImageSubresourceLayers	m_subresourceLayers;
1904	de::MovePtr<Buffer>				m_hostBuffer;
1905	de::MovePtr<Image>				m_image;
1906	VkExtent3D						m_imageExtent;
1907};
1908
1909class Support : public OperationSupport
1910{
1911public:
1912	Support (const ResourceDescription& resourceDesc, const AccessMode mode)
1913		: m_mode				(mode)
1914		, m_requiredQueueFlags	(resourceDesc.type == RESOURCE_TYPE_IMAGE && isDepthStencilFormat(resourceDesc.imageFormat) ? VK_QUEUE_GRAPHICS_BIT : VK_QUEUE_TRANSFER_BIT)
1915	{
1916		// From spec:
1917		//   Because depth or stencil aspect buffer to image copies may require format conversions on some implementations,
1918		//   they are not supported on queues that do not support graphics.
1919
1920		DE_ASSERT(m_mode == ACCESS_MODE_READ || m_mode == ACCESS_MODE_WRITE);
1921		DE_ASSERT(m_mode == ACCESS_MODE_READ || resourceDesc.type != RESOURCE_TYPE_BUFFER);
1922		DE_ASSERT(m_mode == ACCESS_MODE_WRITE || resourceDesc.type != RESOURCE_TYPE_IMAGE);
1923	}
1924
1925	deUint32 getResourceUsageFlags (void) const
1926	{
1927		if (m_mode == ACCESS_MODE_READ)
1928			return VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1929		else
1930			return VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1931	}
1932
1933	VkQueueFlags getQueueFlags (const OperationContext& context) const
1934	{
1935		DE_UNREF(context);
1936		return m_requiredQueueFlags;
1937	}
1938
1939	de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
1940	{
1941		if (m_mode == ACCESS_MODE_READ)
1942			return de::MovePtr<Operation>(new ReadImplementation(context, resource));
1943		else
1944			return de::MovePtr<Operation>(new WriteImplementation(context, resource));
1945	}
1946
1947private:
1948	const AccessMode			m_mode;
1949	const VkQueueFlags			m_requiredQueueFlags;
1950};
1951
1952} // CopyBufferToImage ns
1953
1954namespace CopyImageToBuffer
1955{
1956
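//! Writes to the buffer resource with vkCmdCopyImageToBuffer: a host-filled staging buffer is first uploaded to a temporary image, which is then copied into the resource buffer.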
1957class WriteImplementation : public Operation
1958{
1959public:
1960	WriteImplementation (OperationContext& context, Resource& resource)
1961		: m_context				(context)
1962		, m_resource			(resource)
1963		, m_subresourceRange	(makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u))
1964		, m_subresourceLayers	(makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u))
1965	{
1966		DE_ASSERT(m_resource.getType() == RESOURCE_TYPE_BUFFER);
1967
1968		const DeviceInterface&	vk			= m_context.getDeviceInterface();
1969		const VkDevice			device		= m_context.getDevice();
1970		Allocator&				allocator	= m_context.getAllocator();
1971		const VkFormat			format		= VK_FORMAT_R8G8B8A8_UNORM;
1972		const deUint32			pixelSize	= tcu::getPixelSize(mapVkFormat(format));
1973
1974		DE_ASSERT((m_resource.getBuffer().size % pixelSize) == 0);
1975		m_imageExtent = get2DImageExtentWithSize(m_resource.getBuffer().size, pixelSize);
1976
1977		// Source data staging buffer
1978		m_hostBuffer = de::MovePtr<Buffer>(new Buffer(
1979			vk, device, allocator, makeBufferCreateInfo(m_resource.getBuffer().size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT), MemoryRequirement::HostVisible));
1980
1981		const Allocation& alloc = m_hostBuffer->getAllocation();
1982		fillPattern(alloc.getHostPtr(), m_resource.getBuffer().size);
1983		flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), m_resource.getBuffer().size);
1984
1985		// Source data image
1986		m_image = de::MovePtr<Image>(new Image(
1987			vk, device, allocator, makeImageCreateInfo(VK_IMAGE_TYPE_2D, m_imageExtent, format, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT), MemoryRequirement::Any));
1988	}
1989
1990	void recordCommands (const VkCommandBuffer cmdBuffer)
1991	{
1992		const DeviceInterface&	vk			= m_context.getDeviceInterface();
1993		const VkBufferImageCopy	copyRegion	= makeBufferImageCopy(m_subresourceLayers, m_imageExtent);
1994
1995		// Host buffer -> Image
1996		{
1997			const VkImageMemoryBarrier layoutBarrier = makeImageMemoryBarrier(
1998				(VkAccessFlags)0, VK_ACCESS_TRANSFER_WRITE_BIT,
1999				VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2000				**m_image, m_subresourceRange);
2001			vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0u, DE_NULL, 0u, DE_NULL, 1u, &layoutBarrier);
2002
2003			vk.cmdCopyBufferToImage(cmdBuffer, **m_hostBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);
2004		}
2005		// Image -> Resource
2006		{
2007			const VkImageMemoryBarrier layoutBarrier = makeImageMemoryBarrier(
2008				VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
2009				VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
2010				**m_image, m_subresourceRange);
2011			vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0u, DE_NULL, 0u, DE_NULL, 1u, &layoutBarrier);
2012
2013			vk.cmdCopyImageToBuffer(cmdBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, m_resource.getBuffer().handle, 1u, &copyRegion);
2014		}
2015	}
2016
2017	SyncInfo getSyncInfo (void) const
2018	{
2019		const SyncInfo syncInfo =
2020		{
2021			VK_PIPELINE_STAGE_TRANSFER_BIT,		// VkPipelineStageFlags		stageMask;
2022			VK_ACCESS_TRANSFER_WRITE_BIT,		// VkAccessFlags			accessMask;
2023			VK_IMAGE_LAYOUT_UNDEFINED,			// VkImageLayout			imageLayout;
2024		};
2025		return syncInfo;
2026	}
2027
2028	Data getData (void) const
2029	{
2030		return getHostBufferData(m_context, *m_hostBuffer, m_resource.getBuffer().size);
2031	}
2032
2033private:
2034	OperationContext&				m_context;
2035	Resource&						m_resource;
2036	const VkImageSubresourceRange	m_subresourceRange;
2037	const VkImageSubresourceLayers	m_subresourceLayers;
2038	de::MovePtr<Buffer>				m_hostBuffer;
2039	de::MovePtr<Image>				m_image;
2040	VkExtent3D						m_imageExtent;
2041};
2042
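//! Reads the image resource with vkCmdCopyImageToBuffer into a host-visible buffer.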
2043class ReadImplementation : public Operation
2044{
2045public:
2046	ReadImplementation (OperationContext& context, Resource& resource)
2047		: m_context		(context)
2048		, m_resource	(resource)
2049		, m_bufferSize	(getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent))
2050	{
2051		DE_ASSERT(m_resource.getType() == RESOURCE_TYPE_IMAGE);
2052
2053		const DeviceInterface&	vk			= m_context.getDeviceInterface();
2054		const VkDevice			device		= m_context.getDevice();
2055		Allocator&				allocator	= m_context.getAllocator();
2056
2057		m_hostBuffer = de::MovePtr<Buffer>(new Buffer(
2058			vk, device, allocator, makeBufferCreateInfo(m_bufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible));
2059
2060		const Allocation& alloc = m_hostBuffer->getAllocation();
2061		deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(m_bufferSize));
2062		flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), m_bufferSize);
2063	}
2064
2065	void recordCommands (const VkCommandBuffer cmdBuffer)
2066	{
2067		const DeviceInterface&	vk			= m_context.getDeviceInterface();
2068		const VkBufferImageCopy	copyRegion	= makeBufferImageCopy(m_resource.getImage().subresourceLayers, m_resource.getImage().extent);
2069
2070		vk.cmdCopyImageToBuffer(cmdBuffer, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, **m_hostBuffer, 1u, &copyRegion);
2071	}
2072
2073	SyncInfo getSyncInfo (void) const
2074	{
2075		const SyncInfo syncInfo =
2076		{
2077			VK_PIPELINE_STAGE_TRANSFER_BIT,			// VkPipelineStageFlags		stageMask;
2078			VK_ACCESS_TRANSFER_READ_BIT,			// VkAccessFlags			accessMask;
2079			VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,	// VkImageLayout			imageLayout;
2080		};
2081		return syncInfo;
2082	}
2083
2084	Data getData (void) const
2085	{
2086		return getHostBufferData(m_context, *m_hostBuffer, m_bufferSize);
2087	}
2088
2089private:
2090	OperationContext&		m_context;
2091	Resource&				m_resource;
2092	de::MovePtr<Buffer>		m_hostBuffer;
2093	const VkDeviceSize		m_bufferSize;
2094};
2095
2096class Support : public OperationSupport
2097{
2098public:
2099	Support (const ResourceDescription& resourceDesc, const AccessMode mode)
2100		: m_mode				(mode)
2101		, m_requiredQueueFlags	(resourceDesc.type == RESOURCE_TYPE_IMAGE && isDepthStencilFormat(resourceDesc.imageFormat) ? VK_QUEUE_GRAPHICS_BIT : VK_QUEUE_TRANSFER_BIT)
2102	{
2103		DE_ASSERT(m_mode == ACCESS_MODE_READ || m_mode == ACCESS_MODE_WRITE);
2104		DE_ASSERT(m_mode == ACCESS_MODE_READ || resourceDesc.type != RESOURCE_TYPE_IMAGE);
2105		DE_ASSERT(m_mode == ACCESS_MODE_WRITE || resourceDesc.type != RESOURCE_TYPE_BUFFER);
2106	}
2107
2108	deUint32 getResourceUsageFlags (void) const
2109	{
2110		if (m_mode == ACCESS_MODE_READ)
2111			return VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
2112		else
2113			return VK_BUFFER_USAGE_TRANSFER_DST_BIT;
2114	}
2115
2116	VkQueueFlags getQueueFlags (const OperationContext& context) const
2117	{
2118		DE_UNREF(context);
2119		return m_requiredQueueFlags;
2120	}
2121
2122	de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
2123	{
2124		if (m_mode == ACCESS_MODE_READ)
2125			return de::MovePtr<Operation>(new ReadImplementation(context, resource));
2126		else
2127			return de::MovePtr<Operation>(new WriteImplementation(context, resource));
2128	}
2129
2130private:
2131	const AccessMode			m_mode;
2132	const VkQueueFlags			m_requiredQueueFlags;
2133};
2134
2135} // CopyImageToBuffer ns
2136
2137namespace ClearImage
2138{
2139
2140enum ClearMode
2141{
2142	CLEAR_MODE_COLOR,
2143	CLEAR_MODE_DEPTH_STENCIL,
2144};
2145
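//! A write operation that clears the whole image with vkCmdClearColorImage or vkCmdClearDepthStencilImage.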
2146class Implementation : public Operation
2147{
2148public:
2149	Implementation (OperationContext& context, Resource& resource, const ClearMode mode)
2150		: m_context		(context)
2151		, m_resource	(resource)
2152		, m_clearValue	(makeClearValue(m_resource.getImage().format))
2153		, m_mode		(mode)
2154	{
2155		const VkDeviceSize			size		= getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent);
2156		const VkExtent3D&			extent		= m_resource.getImage().extent;
2157		const VkFormat				format		= m_resource.getImage().format;
2158		const tcu::TextureFormat	texFormat	= mapVkFormat(format);
2159
2160		m_data.resize(static_cast<std::size_t>(size));
2161		tcu::PixelBufferAccess imagePixels(texFormat, extent.width, extent.height, extent.depth, &m_data[0]);
2162		clearPixelBuffer(imagePixels, m_clearValue);
2163	}
2164
2165	void recordCommands (const VkCommandBuffer cmdBuffer)
2166	{
2167		const DeviceInterface&	vk	= m_context.getDeviceInterface();
2168
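		// Transition the image to the TRANSFER_DST layout before clearing it.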
2169		const VkImageMemoryBarrier layoutBarrier = makeImageMemoryBarrier(
2170			(VkAccessFlags)0, VK_ACCESS_TRANSFER_WRITE_BIT,
2171			VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2172			m_resource.getImage().handle, m_resource.getImage().subresourceRange);
2173
2174		vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0u, DE_NULL, 0u, DE_NULL, 1u, &layoutBarrier);
2175
2176		if (m_mode == CLEAR_MODE_COLOR)
2177			vk.cmdClearColorImage(cmdBuffer, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &m_clearValue.color, 1u, &m_resource.getImage().subresourceRange);
2178		else
2179			vk.cmdClearDepthStencilImage(cmdBuffer, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &m_clearValue.depthStencil, 1u, &m_resource.getImage().subresourceRange);
2180	}
2181
2182	SyncInfo getSyncInfo (void) const
2183	{
2184		const SyncInfo syncInfo =
2185		{
2186			VK_PIPELINE_STAGE_TRANSFER_BIT,			// VkPipelineStageFlags		stageMask;
2187			VK_ACCESS_TRANSFER_WRITE_BIT,			// VkAccessFlags			accessMask;
2188			VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,	// VkImageLayout			imageLayout;
2189		};
2190		return syncInfo;
2191	}
2192
2193	Data getData (void) const
2194	{
2195		const Data data =
2196		{
2197			m_data.size(),		// std::size_t		size;
2198			&m_data[0],			// const deUint8*	data;
2199		};
2200		return data;
2201	}
2202
2203private:
2204	OperationContext&		m_context;
2205	Resource&				m_resource;
2206	std::vector<deUint8>	m_data;
2207	const VkClearValue		m_clearValue;
2208	const ClearMode			m_mode;
2209};
2210
2211class Support : public OperationSupport
2212{
2213public:
2214	Support (const ResourceDescription& resourceDesc, const ClearMode mode)
2215		: m_resourceDesc	(resourceDesc)
2216		, m_mode			(mode)
2217	{
2218		DE_ASSERT(m_mode == CLEAR_MODE_COLOR || m_mode == CLEAR_MODE_DEPTH_STENCIL);
2219		DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_IMAGE);
2220		DE_ASSERT(m_resourceDesc.imageAspect == VK_IMAGE_ASPECT_COLOR_BIT || (m_mode != CLEAR_MODE_COLOR));
2221		DE_ASSERT((m_resourceDesc.imageAspect & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) || (m_mode != CLEAR_MODE_DEPTH_STENCIL));
2222	}
2223
2224	deUint32 getResourceUsageFlags (void) const
2225	{
2226		return VK_IMAGE_USAGE_TRANSFER_DST_BIT;
2227	}
2228
2229	VkQueueFlags getQueueFlags (const OperationContext& context) const
2230	{
2231		DE_UNREF(context);
2232		if (m_mode == CLEAR_MODE_COLOR)
2233			return VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT;
2234		else
2235			return VK_QUEUE_GRAPHICS_BIT;
2236	}
2237
2238	de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
2239	{
2240		return de::MovePtr<Operation>(new Implementation(context, resource, m_mode));
2241	}
2242
2243private:
2244	const ResourceDescription	m_resourceDesc;
2245	const ClearMode				m_mode;
2246};
2247
2248} // ClearImage ns
2249
2250namespace Draw
2251{
2252
2253enum DrawCall
2254{
2255	DRAW_CALL_DRAW,
2256	DRAW_CALL_DRAW_INDEXED,
2257	DRAW_CALL_DRAW_INDIRECT,
2258	DRAW_CALL_DRAW_INDEXED_INDIRECT,
2259};
2260
2261//! A write operation that is a result of drawing to an image.
2262//! \todo Add support for depth/stencil too?
2263class Implementation : public Operation
2264{
2265public:
2266	Implementation (OperationContext& context, Resource& resource, const DrawCall drawCall)
2267		: m_context		(context)
2268		, m_resource	(resource)
2269		, m_drawCall	(drawCall)
2270		, m_vertices	(context)
2271	{
2272		const DeviceInterface&		vk				= context.getDeviceInterface();
2273		const VkDevice				device			= context.getDevice();
2274		Allocator&					allocator		= context.getAllocator();
2275
2276		// Indirect buffer
2277
2278		if (m_drawCall == DRAW_CALL_DRAW_INDIRECT)
2279		{
2280			m_indirectBuffer = de::MovePtr<Buffer>(new Buffer(vk, device, allocator,
2281				makeBufferCreateInfo(sizeof(VkDrawIndirectCommand), VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT), MemoryRequirement::HostVisible));
2282
2283			const Allocation&				alloc				= m_indirectBuffer->getAllocation();
2284			VkDrawIndirectCommand* const	pIndirectCommand	= static_cast<VkDrawIndirectCommand*>(alloc.getHostPtr());
2285
2286			pIndirectCommand->vertexCount	= m_vertices.getNumVertices();
2287			pIndirectCommand->instanceCount	= 1u;
2288			pIndirectCommand->firstVertex	= 0u;
2289			pIndirectCommand->firstInstance	= 0u;
2290
2291			flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), sizeof(VkDrawIndirectCommand));
2292		}
2293		else if (m_drawCall == DRAW_CALL_DRAW_INDEXED_INDIRECT)
2294		{
2295			m_indirectBuffer = de::MovePtr<Buffer>(new Buffer(vk, device, allocator,
2296				makeBufferCreateInfo(sizeof(VkDrawIndexedIndirectCommand), VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT), MemoryRequirement::HostVisible));
2297
2298			const Allocation&					alloc				= m_indirectBuffer->getAllocation();
2299			VkDrawIndexedIndirectCommand* const	pIndirectCommand	= static_cast<VkDrawIndexedIndirectCommand*>(alloc.getHostPtr());
2300
2301			pIndirectCommand->indexCount	= m_vertices.getNumIndices();
2302			pIndirectCommand->instanceCount	= 1u;
2303			pIndirectCommand->firstIndex	= 0u;
2304			pIndirectCommand->vertexOffset	= 0u;
2305			pIndirectCommand->firstInstance	= 0u;
2306
2307			flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), sizeof(VkDrawIndexedIndirectCommand));
2308		}
2309
2310		// Resource image is the color attachment
2311
2312		m_colorFormat			= m_resource.getImage().format;
2313		m_colorSubresourceRange	= m_resource.getImage().subresourceRange;
2314		m_colorImage			= m_resource.getImage().handle;
2315		m_attachmentExtent		= m_resource.getImage().extent;
2316
2317		// Pipeline
2318
2319		m_colorAttachmentView	= makeImageView						  (vk, device, m_colorImage, VK_IMAGE_VIEW_TYPE_2D, m_colorFormat, m_colorSubresourceRange);
2320		m_renderPass			= makeRenderPass					  (vk, device, m_colorFormat);
2321		m_framebuffer			= makeFramebuffer					  (vk, device, *m_renderPass, *m_colorAttachmentView, m_attachmentExtent.width, m_attachmentExtent.height, 1u);
2322		m_pipelineLayout		= makePipelineLayoutWithoutDescriptors(vk, device);
2323
2324		GraphicsPipelineBuilder pipelineBuilder;
2325		pipelineBuilder
2326			.setRenderSize					(tcu::IVec2(m_attachmentExtent.width, m_attachmentExtent.height))
2327			.setVertexInputSingleAttribute	(m_vertices.getVertexFormat(), m_vertices.getVertexStride())
2328			.setShader						(vk, device, VK_SHADER_STAGE_VERTEX_BIT,	context.getBinaryCollection().get("draw_vert"), DE_NULL)
2329			.setShader						(vk, device, VK_SHADER_STAGE_FRAGMENT_BIT,	context.getBinaryCollection().get("draw_frag"), DE_NULL);
2330
2331		m_pipeline = pipelineBuilder.build(vk, device, *m_pipelineLayout, *m_renderPass, context.getPipelineCacheData());
2332
2333		// Set expected draw values
2334
2335		m_expectedData.resize(static_cast<size_t>(getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent)));
2336		tcu::PixelBufferAccess imagePixels(mapVkFormat(m_colorFormat), m_attachmentExtent.width, m_attachmentExtent.height, m_attachmentExtent.depth, &m_expectedData[0]);
2337		clearPixelBuffer(imagePixels, makeClearValue(m_colorFormat));
2338	}
2339
2340	void recordCommands (const VkCommandBuffer cmdBuffer)
2341	{
2342		const DeviceInterface&	vk	= m_context.getDeviceInterface();
2343
2344		// Change color attachment image layout
2345		{
2346			const VkImageMemoryBarrier colorAttachmentLayoutBarrier = makeImageMemoryBarrier(
2347				(VkAccessFlags)0, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
2348				VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
2349				m_colorImage, m_colorSubresourceRange);
2350
2351			vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, (VkDependencyFlags)0,
2352				0u, DE_NULL, 0u, DE_NULL, 1u, &colorAttachmentLayoutBarrier);
2353		}
2354
2355		{
2356			const VkRect2D renderArea = {
2357				makeOffset2D(0, 0),
2358				makeExtent2D(m_attachmentExtent.width, m_attachmentExtent.height),
2359			};
2360			const tcu::Vec4 clearColor = tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f);
2361
2362			beginRenderPass(vk, cmdBuffer, *m_renderPass, *m_framebuffer, renderArea, clearColor);
2363		}
2364
2365		vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
2366		{
2367			const VkDeviceSize	vertexBufferOffset	= 0ull;
2368			const VkBuffer		vertexBuffer		= m_vertices.getVertexBuffer();
2369			vk.cmdBindVertexBuffers(cmdBuffer, 0u, 1u, &vertexBuffer, &vertexBufferOffset);
2370		}
2371
2372		if (m_drawCall == DRAW_CALL_DRAW_INDEXED || m_drawCall == DRAW_CALL_DRAW_INDEXED_INDIRECT)
2373			vk.cmdBindIndexBuffer(cmdBuffer, m_vertices.getIndexBuffer(), 0u, m_vertices.getIndexType());
2374
2375		switch (m_drawCall)
2376		{
2377			case DRAW_CALL_DRAW:
2378				vk.cmdDraw(cmdBuffer, m_vertices.getNumVertices(), 1u, 0u, 0u);
2379				break;
2380
2381			case DRAW_CALL_DRAW_INDEXED:
2382				vk.cmdDrawIndexed(cmdBuffer, m_vertices.getNumIndices(), 1u, 0u, 0, 0u);
2383				break;
2384
2385			case DRAW_CALL_DRAW_INDIRECT:
2386				vk.cmdDrawIndirect(cmdBuffer, **m_indirectBuffer, 0u, 1u, 0u);
2387				break;
2388
2389			case DRAW_CALL_DRAW_INDEXED_INDIRECT:
2390				vk.cmdDrawIndexedIndirect(cmdBuffer, **m_indirectBuffer, 0u, 1u, 0u);
2391				break;
2392		}
2393
2394		endRenderPass(vk, cmdBuffer);
2395	}
2396
2397	SyncInfo getSyncInfo (void) const
2398	{
2399		const SyncInfo syncInfo =
2400		{
2401			VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,		// VkPipelineStageFlags		stageMask;
2402			VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,				// VkAccessFlags			accessMask;
2403			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,			// VkImageLayout			imageLayout;
2404		};
2405		return syncInfo;
2406	}
2407
2408	Data getData (void) const
2409	{
2410		const Data data =
2411		{
2412			m_expectedData.size(),		// std::size_t		size;
2413			&m_expectedData[0],			// const deUint8*	data;
2414		};
2415		return data;
2416	}
2417
2418private:
2419	OperationContext&			m_context;
2420	Resource&					m_resource;
2421	const DrawCall				m_drawCall;
2422	const VertexGrid			m_vertices;
2423	std::vector<deUint8>		m_expectedData;
2424	de::MovePtr<Buffer>			m_indirectBuffer;
2425	VkFormat					m_colorFormat;
2426	VkImage						m_colorImage;
2427	Move<VkImageView>			m_colorAttachmentView;
2428	VkImageSubresourceRange		m_colorSubresourceRange;
2429	VkExtent3D					m_attachmentExtent;
2430	Move<VkRenderPass>			m_renderPass;
2431	Move<VkFramebuffer>			m_framebuffer;
2432	Move<VkPipelineLayout>		m_pipelineLayout;
2433	Move<VkPipeline>			m_pipeline;
2434};
2435
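//! Format a fixed-size array as a comma-separated list (used to emit the clear color literal in the fragment shader).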
2436template<typename T, std::size_t N>
2437std::string toString (const T (&values)[N])
2438{
2439	std::ostringstream str;
2440	for (std::size_t i = 0; i < N; ++i)
2441		str << (i != 0 ? ", " : "") << values[i];
2442	return str.str();
2443}
2444
2445class Support : public OperationSupport
2446{
2447public:
2448	Support (const ResourceDescription& resourceDesc, const DrawCall drawCall)
2449		: m_resourceDesc	(resourceDesc)
2450		, m_drawCall		(drawCall)
2451	{
2452		DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_IMAGE && m_resourceDesc.imageType == VK_IMAGE_TYPE_2D);
2453		DE_ASSERT(!isDepthStencilFormat(m_resourceDesc.imageFormat));
2454	}
2455
2456	void initPrograms (SourceCollections& programCollection) const
2457	{
2458		// Vertex
2459		{
2460			std::ostringstream src;
2461			src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
2462				<< "\n"
2463				<< "layout(location = 0) in vec4 v_in_position;\n"
2464				<< "\n"
2465				<< "out " << s_perVertexBlock << ";\n"
2466				<< "\n"
2467				<< "void main (void)\n"
2468				<< "{\n"
2469				<< "    gl_Position = v_in_position;\n"
2470				<< "}\n";
2471
2472			programCollection.glslSources.add("draw_vert") << glu::VertexSource(src.str());
2473		}
2474
2475		// Fragment
2476		{
2477			const VkClearValue	clearValue		= makeClearValue(m_resourceDesc.imageFormat);
2478			const bool			isIntegerFormat = isIntFormat(m_resourceDesc.imageFormat) || isUintFormat(m_resourceDesc.imageFormat);
2479			const std::string	colorType		= (isIntegerFormat ? "uvec4" : "vec4");
2480
2481			std::ostringstream src;
2482			src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
2483				<< "\n"
2484				<< "layout(location = 0) out " << colorType << " o_color;\n"
2485				<< "\n"
2486				<< "void main (void)\n"
2487				<< "{\n"
2488				<< "    o_color = " << colorType << "(" << (isIntegerFormat ? toString(clearValue.color.uint32) : toString(clearValue.color.float32)) << ");\n"
2489				<< "}\n";
2490
2491			programCollection.glslSources.add("draw_frag") << glu::FragmentSource(src.str());
2492		}
2493	}
2494
2495	deUint32 getResourceUsageFlags (void) const
2496	{
2497		return VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
2498	}
2499
2500	VkQueueFlags getQueueFlags (const OperationContext& context) const
2501	{
2502		DE_UNREF(context);
2503		return VK_QUEUE_GRAPHICS_BIT;
2504	}
2505
2506	de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
2507	{
2508		return de::MovePtr<Operation>(new Implementation(context, resource, m_drawCall));
2509	}
2510
2511private:
2512	const ResourceDescription	m_resourceDesc;
2513	const DrawCall				m_drawCall;
2514};
2515
2516} // Draw ns
2517
2518namespace ClearAttachments
2519{
2520
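//! A write operation that clears the image with vkCmdClearAttachments inside a render pass, binding it as a color or depth/stencil attachment.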
2521class Implementation : public Operation
2522{
2523public:
2524	Implementation (OperationContext& context, Resource& resource)
2525		: m_context		(context)
2526		, m_resource	(resource)
2527		, m_clearValue	(makeClearValue(m_resource.getImage().format))
2528	{
2529		const DeviceInterface&		vk				= context.getDeviceInterface();
2530		const VkDevice				device			= context.getDevice();
2531
2532		const VkDeviceSize			size		= getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent);
2533		const VkExtent3D&			extent		= m_resource.getImage().extent;
2534		const VkFormat				format		= m_resource.getImage().format;
2535		const tcu::TextureFormat	texFormat	= mapVkFormat(format);
2536		const SyncInfo				syncInfo	= getSyncInfo();
2537
2538		m_data.resize(static_cast<std::size_t>(size));
2539		tcu::PixelBufferAccess imagePixels(texFormat, extent.width, extent.height, extent.depth, &m_data[0]);
2540		clearPixelBuffer(imagePixels, m_clearValue);
2541
2542		m_attachmentView = makeImageView(vk, device, m_resource.getImage().handle, getImageViewType(m_resource.getImage().imageType), m_resource.getImage().format, m_resource.getImage().subresourceRange);
2543
2544		const VkAttachmentDescription colorAttachmentDescription =
2545		{
2546			(VkAttachmentDescriptionFlags)0,	// VkAttachmentDescriptionFlags		flags;
2547			m_resource.getImage().format,		// VkFormat							format;
2548			VK_SAMPLE_COUNT_1_BIT,				// VkSampleCountFlagBits			samples;
2549			VK_ATTACHMENT_LOAD_OP_DONT_CARE,	// VkAttachmentLoadOp				loadOp;
2550			VK_ATTACHMENT_STORE_OP_STORE,		// VkAttachmentStoreOp				storeOp;
2551			VK_ATTACHMENT_LOAD_OP_DONT_CARE,	// VkAttachmentLoadOp				stencilLoadOp;
2552			VK_ATTACHMENT_STORE_OP_STORE,		// VkAttachmentStoreOp				stencilStoreOp;
2553			VK_IMAGE_LAYOUT_UNDEFINED,			// VkImageLayout					initialLayout;
2554			syncInfo.imageLayout				// VkImageLayout					finalLayout;
2555		};
2556
2557		const VkAttachmentReference colorAttachmentReference =
2558		{
2559			0u,						// deUint32			attachment;
2560			syncInfo.imageLayout	// VkImageLayout	layout;
2561		};
2562
2563		const VkAttachmentReference depthStencilAttachmentReference =
2564		{
2565			0u,						// deUint32			attachment;
2566			syncInfo.imageLayout	// VkImageLayout	layout;
2567		};
2568
2569		VkSubpassDescription subpassDescription =
2570		{
2571			(VkSubpassDescriptionFlags)0,		// VkSubpassDescriptionFlags		flags;
2572			VK_PIPELINE_BIND_POINT_GRAPHICS,	// VkPipelineBindPoint				pipelineBindPoint;
2573			0u,									// deUint32							inputAttachmentCount;
2574			DE_NULL,							// const VkAttachmentReference*		pInputAttachments;
2575			0u,									// deUint32							colorAttachmentCount;
2576			DE_NULL,							// const VkAttachmentReference*		pColorAttachments;
2577			DE_NULL,							// const VkAttachmentReference*		pResolveAttachments;
2578			DE_NULL,							// const VkAttachmentReference*		pDepthStencilAttachment;
2579			0u,									// deUint32							preserveAttachmentCount;
2580			DE_NULL								// const deUint32*					pPreserveAttachments;
2581		};
2582
2583		switch (m_resource.getImage().subresourceRange.aspectMask)
2584		{
2585			case VK_IMAGE_ASPECT_COLOR_BIT:
2586				subpassDescription.colorAttachmentCount	= 1u;
2587				subpassDescription.pColorAttachments	= &colorAttachmentReference;
2588			break;
2589			case VK_IMAGE_ASPECT_STENCIL_BIT:
2590			case VK_IMAGE_ASPECT_DEPTH_BIT:
2591				subpassDescription.pDepthStencilAttachment = &depthStencilAttachmentReference;
2592			break;
2593			default:
2594				DE_ASSERT(0);
2595			break;
2596		}
2597
2598		const VkRenderPassCreateInfo renderPassInfo =
2599		{
2600			VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,	// VkStructureType					sType;
2601			DE_NULL,									// const void*						pNext;
2602			(VkRenderPassCreateFlags)0,					// VkRenderPassCreateFlags			flags;
2603			1u,											// deUint32							attachmentCount;
2604			&colorAttachmentDescription,				// const VkAttachmentDescription*	pAttachments;
2605			1u,											// deUint32							subpassCount;
2606			&subpassDescription,						// const VkSubpassDescription*		pSubpasses;
2607			0u,											// deUint32							dependencyCount;
2608			DE_NULL										// const VkSubpassDependency*		pDependencies;
2609		};
2610
2611		m_renderPass	= createRenderPass(vk, device, &renderPassInfo);
2612		m_frameBuffer	= makeFramebuffer(vk, device, *m_renderPass, *m_attachmentView, m_resource.getImage().extent.width, m_resource.getImage().extent.height, 1u);
2613	}
2614
2615	void recordCommands (const VkCommandBuffer cmdBuffer)
2616	{
2617		const DeviceInterface&		vk						= m_context.getDeviceInterface();
2618		const VkRenderPassBeginInfo	renderPassBeginInfo		=
2619		{
2620			VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,				// VkStructureType		sType;
2621			DE_NULL,												// const void*			pNext;
2622			*m_renderPass,											// VkRenderPass			renderPass;
2623			*m_frameBuffer,											// VkFramebuffer		framebuffer;
2624			{
2625				{ 0, 0 },											// VkOffset2D			offset;
2626				{
2627					m_resource.getImage().extent.width,				// deUint32				width;
2628					m_resource.getImage().extent.height				// deUint32				height;
2629				}													// VkExtent2D			extent;
2630			},														// VkRect2D				renderArea;
2631			1u,														// deUint32				clearValueCount;
2632			&m_clearValue											// const VkClearValue*	pClearValues;
2633		};
2634
2635		vk.cmdBeginRenderPass(cmdBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
2636
2637		const VkClearAttachment	clearAttachment	=
2638		{
2639			m_resource.getImage().subresourceRange.aspectMask,	// VkImageAspectFlags	aspectMask;
2640			0,													// deUint32				colorAttachment;
2641			m_clearValue										// VkClearValue			clearValue;
2642		};
2643
2644		const VkRect2D			rect2D			=
2645		{
2646			{ 0u, 0u, },																	//	VkOffset2D	offset;
2647			{ m_resource.getImage().extent.width, m_resource.getImage().extent.height },	//	VkExtent2D	extent;
2648		};
2649
2650		const VkClearRect		clearRect		=
2651		{
2652			rect2D,												// VkRect2D	rect;
2653			0u,													// deUint32	baseArrayLayer;
2654			m_resource.getImage().subresourceLayers.layerCount	// deUint32	layerCount;
2655		};
2656
2657		vk.cmdClearAttachments(cmdBuffer, 1, &clearAttachment, 1, &clearRect);
2658
2659		vk.cmdEndRenderPass(cmdBuffer);
2660	}
2661
2662	SyncInfo getSyncInfo (void) const
2663	{
2664		SyncInfo syncInfo;
2665		syncInfo.stageMask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
2666
2667		switch (m_resource.getImage().subresourceRange.aspectMask)
2668		{
2669			case VK_IMAGE_ASPECT_COLOR_BIT:
2670				syncInfo.accessMask		= VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
2671				syncInfo.imageLayout	= VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
2672			break;
2673			case VK_IMAGE_ASPECT_STENCIL_BIT:
2674			case VK_IMAGE_ASPECT_DEPTH_BIT:
2675				syncInfo.accessMask		= VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
2676				syncInfo.imageLayout	= VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
2677			break;
2678			default:
2679				DE_ASSERT(0);
2680			break;
2681		}
2682
2683		return syncInfo;
2684	}
2685
2686	Data getData (void) const
2687	{
2688		const Data data =
2689		{
2690			m_data.size(),	// std::size_t		size;
2691			&m_data[0],		// const deUint8*	data;
2692		};
2693		return data;
2694	}
2695
2696private:
2697	OperationContext&		m_context;
2698	Resource&				m_resource;
2699	std::vector<deUint8>	m_data;
2700	const VkClearValue		m_clearValue;
2701	Move<VkImageView>		m_attachmentView;
2702	Move<VkRenderPass>		m_renderPass;
2703	Move<VkFramebuffer>		m_frameBuffer;
2704};
2705
2706class Support : public OperationSupport
2707{
2708public:
2709	Support (const ResourceDescription& resourceDesc)
2710		: m_resourceDesc (resourceDesc)
2711	{
2712		DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_IMAGE);
2713	}
2714
2715	deUint32 getResourceUsageFlags (void) const
2716	{
2717		switch (m_resourceDesc.imageAspect)
2718		{
2719			case VK_IMAGE_ASPECT_COLOR_BIT:
2720				return VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
2721			case VK_IMAGE_ASPECT_STENCIL_BIT:
2722			case VK_IMAGE_ASPECT_DEPTH_BIT:
2723				return VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
2724			default:
2725				DE_ASSERT(0);
2726		}
2727		return 0u;
2728	}
2729
2730	VkQueueFlags getQueueFlags (const OperationContext& context) const
2731	{
2732		DE_UNREF(context);
2733		return VK_QUEUE_GRAPHICS_BIT;
2734	}
2735
2736	de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
2737	{
2738		return de::MovePtr<Operation>(new Implementation(context, resource));
2739	}
2740
2741private:
2742	const ResourceDescription	m_resourceDesc;
2743};
2744
2745} // ClearAttachments ns
2746
2747namespace IndirectBuffer
2748{
2749
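//! Graphics pipeline that consumes the indirect buffer with vkCmdDrawIndirect or vkCmdDrawIndexedIndirect.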
2750class GraphicsPipeline : public Pipeline
2751{
2752public:
2753	GraphicsPipeline (OperationContext&				context,
2754					  const ResourceType			resourceType,
2755					  const VkBuffer				indirectBuffer,
2756					  const std::string&			shaderPrefix,
2757					  const VkDescriptorSetLayout	descriptorSetLayout)
2758		: m_resourceType	(resourceType)
2759		, m_indirectBuffer	(indirectBuffer)
2760		, m_vertices		(context)
2761	{
2762		const DeviceInterface&		vk				= context.getDeviceInterface();
2763		const VkDevice				device			= context.getDevice();
2764		Allocator&					allocator		= context.getAllocator();
2765
2766		// Color attachment
2767
2768		m_colorFormat					= VK_FORMAT_R8G8B8A8_UNORM;
2769		m_colorImageSubresourceRange	= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);
2770		m_colorImageExtent				= makeExtent3D(16u, 16u, 1u);
2771		m_colorAttachmentImage			= de::MovePtr<Image>(new Image(vk, device, allocator,
2772			makeImageCreateInfo(VK_IMAGE_TYPE_2D, m_colorImageExtent, m_colorFormat, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT),
2773			MemoryRequirement::Any));
2774
2775		// Pipeline
2776
2777		m_colorAttachmentView	= makeImageView		(vk, device, **m_colorAttachmentImage, VK_IMAGE_VIEW_TYPE_2D, m_colorFormat, m_colorImageSubresourceRange);
2778		m_renderPass			= makeRenderPass	(vk, device, m_colorFormat);
2779		m_framebuffer			= makeFramebuffer	(vk, device, *m_renderPass, *m_colorAttachmentView, m_colorImageExtent.width, m_colorImageExtent.height, 1u);
2780		m_pipelineLayout		= makePipelineLayout(vk, device, descriptorSetLayout);
2781
2782		GraphicsPipelineBuilder pipelineBuilder;
2783		pipelineBuilder
2784			.setRenderSize					(tcu::IVec2(m_colorImageExtent.width, m_colorImageExtent.height))
2785			.setVertexInputSingleAttribute	(m_vertices.getVertexFormat(), m_vertices.getVertexStride())
2786			.setShader						(vk, device, VK_SHADER_STAGE_VERTEX_BIT,	context.getBinaryCollection().get(shaderPrefix + "vert"), DE_NULL)
2787			.setShader						(vk, device, VK_SHADER_STAGE_FRAGMENT_BIT,	context.getBinaryCollection().get(shaderPrefix + "frag"), DE_NULL);
2788
2789		m_pipeline = pipelineBuilder.build(vk, device, *m_pipelineLayout, *m_renderPass, context.getPipelineCacheData());
2790	}
2791
2792	void recordCommands (OperationContext& context, const VkCommandBuffer cmdBuffer, const VkDescriptorSet descriptorSet)
2793	{
2794		const DeviceInterface&	vk	= context.getDeviceInterface();
2795
2796		// Change color attachment image layout
2797		{
2798			const VkImageMemoryBarrier colorAttachmentLayoutBarrier = makeImageMemoryBarrier(
2799				(VkAccessFlags)0, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
2800				VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
2801				**m_colorAttachmentImage, m_colorImageSubresourceRange);
2802
2803			vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, (VkDependencyFlags)0,
2804				0u, DE_NULL, 0u, DE_NULL, 1u, &colorAttachmentLayoutBarrier);
2805		}
2806
2807		{
2808			const VkRect2D renderArea = {
2809				makeOffset2D(0, 0),
2810				makeExtent2D(m_colorImageExtent.width, m_colorImageExtent.height),
2811			};
2812			const tcu::Vec4 clearColor = tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f);
2813
2814			beginRenderPass(vk, cmdBuffer, *m_renderPass, *m_framebuffer, renderArea, clearColor);
2815		}
2816
2817		vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
2818		vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipelineLayout, 0u, 1u, &descriptorSet, 0u, DE_NULL);
2819		{
2820			const VkDeviceSize	vertexBufferOffset	= 0ull;
2821			const VkBuffer		vertexBuffer		= m_vertices.getVertexBuffer();
2822			vk.cmdBindVertexBuffers(cmdBuffer, 0u, 1u, &vertexBuffer, &vertexBufferOffset);
2823		}
2824
2825		switch (m_resourceType)
2826		{
2827			case RESOURCE_TYPE_INDIRECT_BUFFER_DRAW:
2828				vk.cmdDrawIndirect(cmdBuffer, m_indirectBuffer, 0u, 1u, 0u);
2829				break;
2830
2831			case RESOURCE_TYPE_INDIRECT_BUFFER_DRAW_INDEXED:
2832				vk.cmdBindIndexBuffer(cmdBuffer, m_vertices.getIndexBuffer(), 0u, m_vertices.getIndexType());
2833				vk.cmdDrawIndexedIndirect(cmdBuffer, m_indirectBuffer, 0u, 1u, 0u);
2834				break;
2835
2836			default:
2837				DE_ASSERT(0);
2838				break;
2839		}
2840		endRenderPass(vk, cmdBuffer);
2841	}
2842
2843private:
2844	const ResourceType			m_resourceType;
2845	const VkBuffer				m_indirectBuffer;
2846	const VertexGrid			m_vertices;
2847	VkFormat					m_colorFormat;
2848	de::MovePtr<Image>			m_colorAttachmentImage;
2849	Move<VkImageView>			m_colorAttachmentView;
2850	VkExtent3D					m_colorImageExtent;
2851	VkImageSubresourceRange		m_colorImageSubresourceRange;
2852	Move<VkRenderPass>			m_renderPass;
2853	Move<VkFramebuffer>			m_framebuffer;
2854	Move<VkPipelineLayout>		m_pipelineLayout;
2855	Move<VkPipeline>			m_pipeline;
2856};
2857
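//! A compute pipeline that performs an indirect dispatch from the supplied buffer.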
2858class ComputePipeline : public Pipeline
2859{
2860public:
2861	ComputePipeline (OperationContext&				context,
2862					 const VkBuffer					indirectBuffer,
2863					 const std::string&				shaderPrefix,
2864					 const VkDescriptorSetLayout	descriptorSetLayout)
2865		: m_indirectBuffer	(indirectBuffer)
2866	{
2867		const DeviceInterface&	vk		= context.getDeviceInterface();
2868		const VkDevice			device	= context.getDevice();
2869
2870		const Unique<VkShaderModule> shaderModule(createShaderModule(vk, device, context.getBinaryCollection().get(shaderPrefix + "comp"), (VkShaderModuleCreateFlags)0));
2871
2872		m_pipelineLayout = makePipelineLayout(vk, device, descriptorSetLayout);
2873		m_pipeline		 = makeComputePipeline(vk, device, *m_pipelineLayout, *shaderModule, DE_NULL, context.getPipelineCacheData());
2874	}
2875
2876	void recordCommands (OperationContext& context, const VkCommandBuffer cmdBuffer, const VkDescriptorSet descriptorSet)
2877	{
2878		const DeviceInterface&	vk	= context.getDeviceInterface();
2879
2880		vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_pipeline);
2881		vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_pipelineLayout, 0u, 1u, &descriptorSet, 0u, DE_NULL);
2882		vk.cmdDispatchIndirect(cmdBuffer, m_indirectBuffer, 0u);
2883	}
2884
2885private:
2886	const VkBuffer				m_indirectBuffer;
2887	Move<VkPipelineLayout>		m_pipelineLayout;
2888	Move<VkPipeline>			m_pipeline;
2889};
2890
2891//! Read indirect buffer by executing an indirect draw or dispatch command.
2892class ReadImplementation : public Operation
2893{
2894public:
2895	ReadImplementation (OperationContext& context, Resource& resource)
2896		: m_context				(context)
2897		, m_resource			(resource)
2898		, m_stage				(resource.getType() == RESOURCE_TYPE_INDIRECT_BUFFER_DISPATCH ? VK_SHADER_STAGE_COMPUTE_BIT : VK_SHADER_STAGE_VERTEX_BIT)
2899		, m_pipelineStage		(pipelineStageFlagsFromShaderStageFlagBits(m_stage))
2900		, m_hostBufferSizeBytes	(sizeof(deUint32))
2901	{
2902		requireFeaturesForSSBOAccess (m_context, m_stage);
2903
2904		const DeviceInterface&	vk			= m_context.getDeviceInterface();
2905		const VkDevice			device		= m_context.getDevice();
2906		Allocator&				allocator	= m_context.getAllocator();
2907
2908		m_hostBuffer = de::MovePtr<Buffer>(new Buffer(
2909			vk, device, allocator, makeBufferCreateInfo(m_hostBufferSizeBytes, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT), MemoryRequirement::HostVisible));
2910
2911		// Init host buffer data
2912		{
2913			const Allocation& alloc = m_hostBuffer->getAllocation();
2914			deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(m_hostBufferSizeBytes));
2915			flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), static_cast<size_t>(m_hostBufferSizeBytes));
2916		}
2917
2918		// Prepare descriptors
2919		{
2920			m_descriptorSetLayout = DescriptorSetLayoutBuilder()
2921				.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, m_stage)
2922				.build(vk, device);
2923
2924			m_descriptorPool = DescriptorPoolBuilder()
2925				.addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
2926				.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
2927
2928			m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
2929
2930			const VkDescriptorBufferInfo  hostBufferInfo = makeDescriptorBufferInfo(**m_hostBuffer, 0u, m_hostBufferSizeBytes);
2931
2932			DescriptorSetUpdateBuilder()
2933				.writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &hostBufferInfo)
2934				.update(vk, device);
2935		}
2936
2937		// Create pipeline
2938		m_pipeline = (m_resource.getType() == RESOURCE_TYPE_INDIRECT_BUFFER_DISPATCH
2939			? de::MovePtr<Pipeline>(new ComputePipeline(context, m_resource.getBuffer().handle, "read_ib_", *m_descriptorSetLayout))
2940			: de::MovePtr<Pipeline>(new GraphicsPipeline(context, m_resource.getType(), m_resource.getBuffer().handle, "read_ib_", *m_descriptorSetLayout)));
2941	}
2942
2943	void recordCommands (const VkCommandBuffer cmdBuffer)
2944	{
2945		const DeviceInterface&	vk	= m_context.getDeviceInterface();
2946
2947		m_pipeline->recordCommands(m_context, cmdBuffer, *m_descriptorSet);
2948
2949		// Insert a barrier so data written by the shader is available to the host
2950		const VkBufferMemoryBarrier	barrier = makeBufferMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT, **m_hostBuffer, 0u, m_hostBufferSizeBytes);
2951		vk.cmdPipelineBarrier(cmdBuffer, m_pipelineStage, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0u, DE_NULL, 1u, &barrier, 0u, DE_NULL);
2952	}
2953
2954	SyncInfo getSyncInfo (void) const
2955	{
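		// The indirect buffer itself is consumed by the indirect draw/dispatch command, hence the indirect-command stage and access below.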
2956		const SyncInfo syncInfo =
2957		{
2958			VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,	// VkPipelineStageFlags		stageMask;
2959			VK_ACCESS_INDIRECT_COMMAND_READ_BIT,	// VkAccessFlags			accessMask;
2960			VK_IMAGE_LAYOUT_UNDEFINED,				// VkImageLayout			imageLayout;
2961		};
2962		return syncInfo;
2963	}
2964
2965	Data getData (void) const
2966	{
2967		return getHostBufferData(m_context, *m_hostBuffer, m_hostBufferSizeBytes);
2968	}
2969
2970private:
2971	OperationContext&			m_context;
2972	Resource&					m_resource;
2973	const VkShaderStageFlagBits	m_stage;
2974	const VkPipelineStageFlags	m_pipelineStage;
2975	const VkDeviceSize			m_hostBufferSizeBytes;
2976	de::MovePtr<Buffer>			m_hostBuffer;
2977	Move<VkDescriptorPool>		m_descriptorPool;
2978	Move<VkDescriptorSetLayout>	m_descriptorSetLayout;
2979	Move<VkDescriptorSet>		m_descriptorSet;
2980	de::MovePtr<Pipeline>		m_pipeline;
2981};
2982
2983//! Prepare indirect buffer for a draw/dispatch call.
2984class WriteImplementation : public Operation
2985{
2986public:
2987	WriteImplementation (OperationContext& context, Resource& resource)
2988		: m_context			(context)
2989		, m_resource		(resource)
2990	{
2991		switch (m_resource.getType())
2992		{
2993			case RESOURCE_TYPE_INDIRECT_BUFFER_DRAW:
2994			{
2995				m_drawIndirect.vertexCount		= 6u;
2996				m_drawIndirect.instanceCount	= 1u;
2997				m_drawIndirect.firstVertex		= 0u;
2998				m_drawIndirect.firstInstance	= 0u;
2999
3000				m_indirectData					= reinterpret_cast<deUint32*>(&m_drawIndirect);
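				// The read operation draws 6 vertices and each vertex shader invocation atomically adds 1 to the result buffer.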
3001				m_expectedValue					= 6u;
3002			}
3003			break;
3004
3005			case RESOURCE_TYPE_INDIRECT_BUFFER_DRAW_INDEXED:
3006			{
3007				m_drawIndexedIndirect.indexCount	= 6u;
3008				m_drawIndexedIndirect.instanceCount	= 1u;
3009				m_drawIndexedIndirect.firstIndex	= 0u;
3010				m_drawIndexedIndirect.vertexOffset	= 0u;
3011				m_drawIndexedIndirect.firstInstance	= 0u;
3012
3013				m_indirectData						= reinterpret_cast<deUint32*>(&m_drawIndexedIndirect);
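				// 6 indices are drawn and each vertex shader invocation atomically adds 1 to the result buffer.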
3014				m_expectedValue						= 6u;
3015			}
3016			break;
3017
3018			case RESOURCE_TYPE_INDIRECT_BUFFER_DISPATCH:
3019			{
3020				m_dispatchIndirect.x	= 7u;
3021				m_dispatchIndirect.y	= 2u;
3022				m_dispatchIndirect.z	= 1u;
3023
3024				m_indirectData			= reinterpret_cast<deUint32*>(&m_dispatchIndirect);
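				// 7 x 2 x 1 workgroups with local_size_x = 1 yield 14 invocations, each atomically adding 1 to the result buffer.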
3025				m_expectedValue			= 14u;
3026			}
3027			break;
3028
3029			default:
3030				DE_ASSERT(0);
3031				break;
3032		}
3033	}
3034
3035	void recordCommands (const VkCommandBuffer cmdBuffer)
3036	{
3037		const DeviceInterface&	vk	= m_context.getDeviceInterface();
3038
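		// vkCmdUpdateBuffer is a transfer operation, which is what getSyncInfo() below reports.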
3039		vk.cmdUpdateBuffer(cmdBuffer, m_resource.getBuffer().handle, m_resource.getBuffer().offset, m_resource.getBuffer().size, m_indirectData);
3040	}
3041
3042	SyncInfo getSyncInfo (void) const
3043	{
3044		const SyncInfo syncInfo =
3045		{
3046			VK_PIPELINE_STAGE_TRANSFER_BIT,		// VkPipelineStageFlags		stageMask;
3047			VK_ACCESS_TRANSFER_WRITE_BIT,		// VkAccessFlags			accessMask;
3048			VK_IMAGE_LAYOUT_UNDEFINED,			// VkImageLayout			imageLayout;
3049		};
3050		return syncInfo;
3051	}
3052
3053	Data getData (void) const
3054	{
3055		const Data data =
3056		{
3057			sizeof(deUint32),									// std::size_t		size;
3058			reinterpret_cast<const deUint8*>(&m_expectedValue),	// const deUint8*	data;
3059		};
3060		return data;
3061	}
3062
3063private:
3064	OperationContext&				m_context;
3065	Resource&						m_resource;
3066	VkDrawIndirectCommand			m_drawIndirect;
3067	VkDrawIndexedIndirectCommand	m_drawIndexedIndirect;
3068	VkDispatchIndirectCommand		m_dispatchIndirect;
3069	deUint32*						m_indirectData;
3070	deUint32						m_expectedValue;	//!< Side-effect value that the read (draw/dispatch) operation is expected to compute.
3071};
3072
3073class ReadSupport : public OperationSupport
3074{
3075public:
3076	ReadSupport (const ResourceDescription& resourceDesc)
3077		: m_resourceDesc	(resourceDesc)
3078	{
3079		DE_ASSERT(isIndirectBuffer(m_resourceDesc.type));
3080	}
3081
3082	void initPrograms (SourceCollections& programCollection) const
3083	{
3084		std::ostringstream decl;
3085		decl << "layout(set = 0, binding = 0, std140) coherent buffer Data {\n"
3086			 << "    uint value;\n"
3087			 << "} sb_out;\n";
3088
3089		std::ostringstream main;
3090		main << "    atomicAdd(sb_out.value, 1u);\n";
3091
3092		// Vertex
3093		{
3094			std::ostringstream src;
3095			src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
3096				<< "\n"
3097				<< "layout(location = 0) in vec4 v_in_position;\n"
3098				<< "\n"
3099				<< "out " << s_perVertexBlock << ";\n"
3100				<< "\n"
3101				<< decl.str()
3102				<< "\n"
3103				<< "void main (void)\n"
3104				<< "{\n"
3105				<< "    gl_Position = v_in_position;\n"
3106				<< main.str()
3107				<< "}\n";
3108
3109			programCollection.glslSources.add("read_ib_vert") << glu::VertexSource(src.str());
3110		}
3111
3112		// Fragment
3113		{
3114			std::ostringstream src;
3115			src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
3116				<< "\n"
3117				<< "layout(location = 0) out vec4 o_color;\n"
3118				<< "\n"
3119				<< "void main (void)\n"
3120				<< "{\n"
3121				<< "    o_color = vec4(1.0);\n"
3122				<< "}\n";
3123
3124			programCollection.glslSources.add("read_ib_frag") << glu::FragmentSource(src.str());
3125		}
3126
3127		// Compute
3128		{
3129			std::ostringstream src;
3130			src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
3131				<< "\n"
3132				<< "layout(local_size_x = 1) in;\n"
3133				<< "\n"
3134				<< decl.str()
3135				<< "\n"
3136				<< "void main (void)\n"
3137				<< "{\n"
3138				<< main.str()
3139				<< "}\n";
3140
3141			programCollection.glslSources.add("read_ib_comp") << glu::ComputeSource(src.str());
3142		}
3143	}
3144
3145	deUint32 getResourceUsageFlags (void) const
3146	{
3147		return VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
3148	}
3149
3150	VkQueueFlags getQueueFlags (const OperationContext& context) const
3151	{
3152		DE_UNREF(context);
3153		return (m_resourceDesc.type == RESOURCE_TYPE_INDIRECT_BUFFER_DISPATCH ? VK_QUEUE_COMPUTE_BIT : VK_QUEUE_GRAPHICS_BIT);
3154	}
3155
3156	de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
3157	{
3158		return de::MovePtr<Operation>(new ReadImplementation(context, resource));
3159	}
3160
3161private:
3162	const ResourceDescription	m_resourceDesc;
3163};
3164
3165
3166class WriteSupport : public OperationSupport
3167{
3168public:
3169	WriteSupport (const ResourceDescription& resourceDesc)
3170	{
3171		DE_ASSERT(isIndirectBuffer(resourceDesc.type));
3172		DE_UNREF(resourceDesc);
3173	}
3174
3175	deUint32 getResourceUsageFlags (void) const
3176	{
3177		return VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3178	}
3179
3180	VkQueueFlags getQueueFlags (const OperationContext& context) const
3181	{
3182		DE_UNREF(context);
3183		return VK_QUEUE_TRANSFER_BIT;
3184	}
3185
3186	de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
3187	{
3188		return de::MovePtr<Operation>(new WriteImplementation(context, resource));
3189	}
3190};
3191
3192} // IndirectBuffer ns
3193
3194namespace VertexInput
3195{
3196
3197class Implementation : public Operation
3198{
3199public:
3200	Implementation (OperationContext& context, Resource& resource)
3201		: m_context		(context)
3202		, m_resource	(resource)
3203	{
3204		requireFeaturesForSSBOAccess (m_context, VK_SHADER_STAGE_VERTEX_BIT);
3205
3206		const DeviceInterface&		vk				= context.getDeviceInterface();
3207		const VkDevice				device			= context.getDevice();
3208		Allocator&					allocator		= context.getAllocator();
3209		const VkDeviceSize			dataSizeBytes	= m_resource.getBuffer().size;
3210
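		// The output buffer mirrors the tested vertex buffer: the vertex shader copies one uvec4 per vertex into it.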
3211		m_outputBuffer = de::MovePtr<Buffer>(new Buffer(vk, device, allocator,
3212			makeBufferCreateInfo(dataSizeBytes, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT), MemoryRequirement::HostVisible));
3213
3214		{
3215			const Allocation& alloc = m_outputBuffer->getAllocation();
3216			deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(dataSizeBytes));
3217			flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), dataSizeBytes);
3218		}
3219
3220		m_descriptorSetLayout = DescriptorSetLayoutBuilder()
3221			.addSingleBinding	(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_VERTEX_BIT)
3222			.build				(vk, device);
3223
3224		m_descriptorPool = DescriptorPoolBuilder()
3225			.addType	(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
3226			.build		(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
3227
3228		m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
3229
3230		const VkDescriptorBufferInfo outputBufferDescriptorInfo = makeDescriptorBufferInfo(m_outputBuffer->get(), 0ull, dataSizeBytes);
3231		DescriptorSetUpdateBuilder()
3232			.writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &outputBufferDescriptorInfo)
3233			.update		(vk, device);
3234
3235		// Color attachment
3236		m_colorFormat						= VK_FORMAT_R8G8B8A8_UNORM;
3237		m_colorImageSubresourceRange		= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);
3238		m_colorImageExtent					= makeExtent3D(16u, 16u, 1u);
3239		m_colorAttachmentImage				= de::MovePtr<Image>(new Image(vk, device, allocator,
3240			makeImageCreateInfo(VK_IMAGE_TYPE_2D, m_colorImageExtent, m_colorFormat, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT),
3241			MemoryRequirement::Any));
3242
3243		// Pipeline
3244		m_colorAttachmentView	= makeImageView		(vk, device, **m_colorAttachmentImage, VK_IMAGE_VIEW_TYPE_2D, m_colorFormat, m_colorImageSubresourceRange);
3245		m_renderPass			= makeRenderPass	(vk, device, m_colorFormat);
3246		m_framebuffer			= makeFramebuffer	(vk, device, *m_renderPass, *m_colorAttachmentView, m_colorImageExtent.width, m_colorImageExtent.height, 1u);
3247		m_pipelineLayout		= makePipelineLayout(vk, device, *m_descriptorSetLayout);
3248
3249		m_pipeline = GraphicsPipelineBuilder()
3250			.setPrimitiveTopology			(VK_PRIMITIVE_TOPOLOGY_POINT_LIST)
3251			.setRenderSize					(tcu::IVec2(static_cast<int>(m_colorImageExtent.width), static_cast<int>(m_colorImageExtent.height)))
3252			.setVertexInputSingleAttribute	(VK_FORMAT_R32G32B32A32_UINT, tcu::getPixelSize(mapVkFormat(VK_FORMAT_R32G32B32A32_UINT)))
3253			.setShader						(vk, device, VK_SHADER_STAGE_VERTEX_BIT,	context.getBinaryCollection().get("input_vert"), DE_NULL)
3254			.setShader						(vk, device, VK_SHADER_STAGE_FRAGMENT_BIT,	context.getBinaryCollection().get("input_frag"), DE_NULL)
3255			.build							(vk, device, *m_pipelineLayout, *m_renderPass, context.getPipelineCacheData());
3256	}
3257
3258	void recordCommands (const VkCommandBuffer cmdBuffer)
3259	{
3260		const DeviceInterface&	vk				= m_context.getDeviceInterface();
3261		const VkDeviceSize		dataSizeBytes	= m_resource.getBuffer().size;
3262
3263		// Change color attachment image layout
3264		{
3265			const VkImageMemoryBarrier colorAttachmentLayoutBarrier = makeImageMemoryBarrier(
3266				(VkAccessFlags)0, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
3267				VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
3268				**m_colorAttachmentImage, m_colorImageSubresourceRange);
3269
3270			vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, (VkDependencyFlags)0,
3271				0u, DE_NULL, 0u, DE_NULL, 1u, &colorAttachmentLayoutBarrier);
3272		}
3273
3274		{
3275			const VkRect2D renderArea = {
3276				makeOffset2D(0, 0),
3277				makeExtent2D(m_colorImageExtent.width, m_colorImageExtent.height),
3278			};
3279			const tcu::Vec4 clearColor = tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f);
3280
3281			beginRenderPass(vk, cmdBuffer, *m_renderPass, *m_framebuffer, renderArea, clearColor);
3282		}
3283
3284		vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
3285		vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipelineLayout, 0u, 1u, &m_descriptorSet.get(), 0u, DE_NULL);
3286		{
3287			const VkDeviceSize vertexBufferOffset = 0ull;
3288			vk.cmdBindVertexBuffers(cmdBuffer, 0u, 1u, &m_resource.getBuffer().handle, &vertexBufferOffset);
3289		}
3290
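		// Draw one point per uvec4 element so that every element of the vertex buffer is read through the vertex input stage.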
3291		vk.cmdDraw(cmdBuffer, static_cast<deUint32>(dataSizeBytes / sizeof(tcu::UVec4)), 1u, 0u, 0u);
3292
3293		endRenderPass(vk, cmdBuffer);
3294
3295		// Insert a barrier so data written by the shader is available to the host
3296		{
3297			const VkBufferMemoryBarrier	barrier = makeBufferMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT, **m_outputBuffer, 0u, m_resource.getBuffer().size);
3298			vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0u, DE_NULL, 1u, &barrier, 0u, DE_NULL);
3299		}
3300	}
3301
3302	SyncInfo getSyncInfo (void) const
3303	{
3304		const SyncInfo syncInfo =
3305		{
3306			VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,		// VkPipelineStageFlags		stageMask;
3307			VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT,	// VkAccessFlags			accessMask;
3308			VK_IMAGE_LAYOUT_UNDEFINED,				// VkImageLayout			imageLayout;
3309		};
3310		return syncInfo;
3311	}
3312
3313	Data getData (void) const
3314	{
3315		return getHostBufferData(m_context, *m_outputBuffer, m_resource.getBuffer().size);
3316	}
3317
3318private:
3319	OperationContext&			m_context;
3320	Resource&					m_resource;
3321	de::MovePtr<Buffer>			m_outputBuffer;
3322	de::MovePtr<Buffer>			m_indexBuffer;
3323	de::MovePtr<Buffer>			m_indirectBuffer;
3324	Move<VkRenderPass>			m_renderPass;
3325	Move<VkFramebuffer>			m_framebuffer;
3326	Move<VkPipelineLayout>		m_pipelineLayout;
3327	Move<VkPipeline>			m_pipeline;
3328	VkFormat					m_colorFormat;
3329	de::MovePtr<Image>			m_colorAttachmentImage;
3330	Move<VkImageView>			m_colorAttachmentView;
3331	VkExtent3D					m_colorImageExtent;
3332	VkImageSubresourceRange		m_colorImageSubresourceRange;
3333	Move<VkDescriptorPool>		m_descriptorPool;
3334	Move<VkDescriptorSetLayout>	m_descriptorSetLayout;
3335	Move<VkDescriptorSet>		m_descriptorSet;
3336};
3337
3338class Support : public OperationSupport
3339{
3340public:
3341	Support (const ResourceDescription& resourceDesc)
3342		: m_resourceDesc	(resourceDesc)
3343	{
3344		DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_BUFFER);
3345	}
3346
3347	void initPrograms (SourceCollections& programCollection) const
3348	{
3349		// Vertex
3350		{
3351			const int vertexStride = static_cast<int>(sizeof(tcu::UVec4));
3352			std::ostringstream src;
3353			src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
3354				<< "\n"
3355				<< "layout(location = 0) in uvec4 v_in_data;\n"
3356				<< "layout(set = 0, binding = 0, std140) writeonly buffer Output {\n"
3357				<< "    uvec4 data[" << m_resourceDesc.size.x()/vertexStride << "];\n"
3358				<< "} b_out;\n"
3359				<< "\n"
3360				<< "void main (void)\n"
3361				<< "{\n"
3362				<< "    b_out.data[gl_VertexIndex] = v_in_data;\n"
3363				<< "    gl_PointSize = 1.0f;\n"
3364				<< "}\n";
3365			programCollection.glslSources.add("input_vert") << glu::VertexSource(src.str());
3366		}
3367
3368		// Fragment
3369		{
3370			std::ostringstream src;
3371			src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
3372				<< "\n"
3373				<< "layout(location = 0) out vec4 o_color;\n"
3374				<< "\n"
3375				<< "void main (void)\n"
3376				<< "{\n"
3377				<< "    o_color = vec4(1.0);\n"
3378				<< "}\n";
3379			programCollection.glslSources.add("input_frag") << glu::FragmentSource(src.str());
3380		}
3381	}
3382
3383	deUint32 getResourceUsageFlags (void) const
3384	{
3385		return VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
3386	}
3387
3388	VkQueueFlags getQueueFlags (const OperationContext& context) const
3389	{
3390		DE_UNREF(context);
3391		return VK_QUEUE_GRAPHICS_BIT;
3392	}
3393
3394	de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
3395	{
3396		return de::MovePtr<Operation>(new Implementation(context, resource));
3397	}
3398
3399private:
3400	const ResourceDescription	m_resourceDesc;
3401};
3402
3403} // VertexInput ns
3404
3405} // anonymous ns
3406
3407OperationContext::OperationContext (Context& context, PipelineCacheData& pipelineCacheData)
3408	: m_vki					(context.getInstanceInterface())
3409	, m_vk					(context.getDeviceInterface())
3410	, m_physicalDevice		(context.getPhysicalDevice())
3411	, m_device				(context.getDevice())
3412	, m_allocator			(context.getDefaultAllocator())
3413	, m_progCollection		(context.getBinaryCollection())
3414	, m_pipelineCacheData	(pipelineCacheData)
3415	, m_deviceExtensions	(context.getDeviceExtensions())
3416	, m_usedApiVersion		(context.getUsedApiVersion())
3417{
3418}
3419
3420OperationContext::OperationContext (Context& context, PipelineCacheData& pipelineCacheData, const DeviceInterface& vk, const VkDevice device, vk::Allocator& allocator)
3421	: m_vki					(context.getInstanceInterface())
3422	, m_vk					(vk)
3423	, m_physicalDevice		(context.getPhysicalDevice())
3424	, m_device				(device)
3425	, m_allocator			(allocator)
3426	, m_progCollection		(context.getBinaryCollection())
3427	, m_pipelineCacheData	(pipelineCacheData)
3428	, m_deviceExtensions	(context.getDeviceExtensions())
3429	, m_usedApiVersion		(context.getUsedApiVersion())
3430{
3431}
3432
3433OperationContext::OperationContext (const deUint32					apiVersion,
3434									const vk::InstanceInterface&	vki,
3435									const vk::DeviceInterface&		vkd,
3436									vk::VkPhysicalDevice			physicalDevice,
3437									vk::VkDevice					device,
3438									vk::Allocator&					allocator,
3439									const std::vector<std::string>&	deviceExtensions,
3440									vk::BinaryCollection&			programCollection,
3441									PipelineCacheData&				pipelineCacheData)
3442	: m_vki					(vki)
3443	, m_vk					(vkd)
3444	, m_physicalDevice		(physicalDevice)
3445	, m_device				(device)
3446	, m_allocator			(allocator)
3447	, m_progCollection		(programCollection)
3448	, m_pipelineCacheData	(pipelineCacheData)
3449	, m_deviceExtensions	(deviceExtensions)
3450	, m_usedApiVersion		(apiVersion)
3451{
3452}
3453
3454Resource::Resource (OperationContext& context, const ResourceDescription& desc, const deUint32 usage, const vk::VkSharingMode sharingMode, const std::vector<deUint32>& queueFamilyIndex)
3455	: m_type	(desc.type)
3456{
3457	const DeviceInterface&		vk			= context.getDeviceInterface();
3458	const InstanceInterface&	vki			= context.getInstanceInterface();
3459	const VkDevice				device		= context.getDevice();
3460	const VkPhysicalDevice		physDevice	= context.getPhysicalDevice();
3461	Allocator&					allocator	= context.getAllocator();
3462
3463	if (m_type == RESOURCE_TYPE_BUFFER || isIndirectBuffer(m_type))
3464	{
3465		m_bufferData.offset					= 0u;
3466		m_bufferData.size					= static_cast<VkDeviceSize>(desc.size.x());
3467		VkBufferCreateInfo bufferCreateInfo = makeBufferCreateInfo(m_bufferData.size, usage);
3468		bufferCreateInfo.sharingMode		= sharingMode;
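		// With VK_SHARING_MODE_CONCURRENT all queue families that will access the resource must be listed.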
3469		if (queueFamilyIndex.size() > 0)
3470		{
3471			bufferCreateInfo.queueFamilyIndexCount	= static_cast<deUint32>(queueFamilyIndex.size());
3472			bufferCreateInfo.pQueueFamilyIndices	= &queueFamilyIndex[0];
3473		}
3474		m_buffer			= de::MovePtr<Buffer>(new Buffer(vk, device, allocator, bufferCreateInfo, MemoryRequirement::Any));
3475		m_bufferData.handle	= **m_buffer;
3476	}
3477	else if (m_type == RESOURCE_TYPE_IMAGE)
3478	{
3479		m_imageData.extent				= makeExtent3D(desc.size.x(), std::max(1, desc.size.y()), std::max(1, desc.size.z()));
3480		m_imageData.imageType			= desc.imageType;
3481		m_imageData.format				= desc.imageFormat;
3482		m_imageData.subresourceRange	= makeImageSubresourceRange(desc.imageAspect, 0u, 1u, 0u, 1u);
3483		m_imageData.subresourceLayers	= makeImageSubresourceLayers(desc.imageAspect, 0u, 0u, 1u);
3484		VkImageCreateInfo imageInfo		= makeImageCreateInfo(m_imageData.imageType, m_imageData.extent, m_imageData.format, usage);
3485		imageInfo.sharingMode			= sharingMode;
3486		if (queueFamilyIndex.size() > 0)
3487		{
3488			imageInfo.queueFamilyIndexCount	= static_cast<deUint32>(queueFamilyIndex.size());
3489			imageInfo.pQueueFamilyIndices	= &queueFamilyIndex[0];
3490		}
3491
3492		VkImageFormatProperties	imageFormatProperties;
3493		const VkResult formatResult		= vki.getPhysicalDeviceImageFormatProperties(physDevice, imageInfo.format, imageInfo.imageType, imageInfo.tiling, imageInfo.usage, imageInfo.flags, &imageFormatProperties);
3494
3495		if (formatResult != VK_SUCCESS)
3496			TCU_THROW(NotSupportedError, "Image format is not supported");
3497
3498		m_image							= de::MovePtr<Image>(new Image(vk, device, allocator, imageInfo, MemoryRequirement::Any));
3499		m_imageData.handle				= **m_image;
3500	}
3501	else
3502		DE_ASSERT(0);
3503}
3504
3505Resource::Resource (ResourceType				type,
3506					vk::Move<vk::VkBuffer>		buffer,
3507					de::MovePtr<vk::Allocation>	allocation,
3508					vk::VkDeviceSize			offset,
3509					vk::VkDeviceSize			size)
3510	: m_type	(type)
3511	, m_buffer	(new Buffer(buffer, allocation))
3512{
3513	DE_ASSERT(type != RESOURCE_TYPE_IMAGE);
3514
3515	m_bufferData.handle	= m_buffer->get();
3516	m_bufferData.offset	= offset;
3517	m_bufferData.size	= size;
3518}
3519
3520Resource::Resource (vk::Move<vk::VkImage>			image,
3521					de::MovePtr<vk::Allocation>		allocation,
3522					const vk::VkExtent3D&			extent,
3523					vk::VkImageType					imageType,
3524					vk::VkFormat					format,
3525					vk::VkImageSubresourceRange		subresourceRange,
3526					vk::VkImageSubresourceLayers	subresourceLayers)
3527	: m_type	(RESOURCE_TYPE_IMAGE)
3528	, m_image	(new Image(image, allocation))
3529{
3530	m_imageData.handle				= m_image->get();
3531	m_imageData.extent				= extent;
3532	m_imageData.imageType			= imageType;
3533	m_imageData.format				= format;
3534	m_imageData.subresourceRange	= subresourceRange;
3535	m_imageData.subresourceLayers	= subresourceLayers;
3536}
3537
3538vk::VkDeviceMemory Resource::getMemory (void) const
3539{
3540	if (m_type == RESOURCE_TYPE_IMAGE)
3541		return m_image->getAllocation().getMemory();
3542	else
3543		return m_buffer->getAllocation().getMemory();
3544}
3545
3546//! \note This function exists for performance reasons. We create a lot of tests, so checking the requirements here,
3547//!       before an OperationSupport object is created, is faster.
3548bool isResourceSupported (const OperationName opName, const ResourceDescription& resourceDesc)
3549{
3550	switch (opName)
3551	{
3552		case OPERATION_NAME_WRITE_FILL_BUFFER:
3553		case OPERATION_NAME_WRITE_COPY_BUFFER:
3554		case OPERATION_NAME_WRITE_COPY_IMAGE_TO_BUFFER:
3555		case OPERATION_NAME_WRITE_SSBO_VERTEX:
3556		case OPERATION_NAME_WRITE_SSBO_TESSELLATION_CONTROL:
3557		case OPERATION_NAME_WRITE_SSBO_TESSELLATION_EVALUATION:
3558		case OPERATION_NAME_WRITE_SSBO_GEOMETRY:
3559		case OPERATION_NAME_WRITE_SSBO_FRAGMENT:
3560		case OPERATION_NAME_WRITE_SSBO_COMPUTE:
3561		case OPERATION_NAME_WRITE_SSBO_COMPUTE_INDIRECT:
3562		case OPERATION_NAME_READ_COPY_BUFFER:
3563		case OPERATION_NAME_READ_COPY_BUFFER_TO_IMAGE:
3564		case OPERATION_NAME_READ_SSBO_VERTEX:
3565		case OPERATION_NAME_READ_SSBO_TESSELLATION_CONTROL:
3566		case OPERATION_NAME_READ_SSBO_TESSELLATION_EVALUATION:
3567		case OPERATION_NAME_READ_SSBO_GEOMETRY:
3568		case OPERATION_NAME_READ_SSBO_FRAGMENT:
3569		case OPERATION_NAME_READ_SSBO_COMPUTE:
3570		case OPERATION_NAME_READ_SSBO_COMPUTE_INDIRECT:
3571		case OPERATION_NAME_READ_VERTEX_INPUT:
3572			return resourceDesc.type == RESOURCE_TYPE_BUFFER;
3573
3574		case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DRAW:
3575		case OPERATION_NAME_READ_INDIRECT_BUFFER_DRAW:
3576			return resourceDesc.type == RESOURCE_TYPE_INDIRECT_BUFFER_DRAW;
3577
3578		case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DRAW_INDEXED:
3579		case OPERATION_NAME_READ_INDIRECT_BUFFER_DRAW_INDEXED:
3580			return resourceDesc.type == RESOURCE_TYPE_INDIRECT_BUFFER_DRAW_INDEXED;
3581
3582		case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DISPATCH:
3583		case OPERATION_NAME_READ_INDIRECT_BUFFER_DISPATCH:
3584			return resourceDesc.type == RESOURCE_TYPE_INDIRECT_BUFFER_DISPATCH;
3585
3586		case OPERATION_NAME_WRITE_UPDATE_BUFFER:
3587			return resourceDesc.type == RESOURCE_TYPE_BUFFER && resourceDesc.size.x() <= MAX_UPDATE_BUFFER_SIZE;
3588
3589		case OPERATION_NAME_WRITE_COPY_IMAGE:
3590		case OPERATION_NAME_WRITE_COPY_BUFFER_TO_IMAGE:
3591		case OPERATION_NAME_READ_COPY_IMAGE:
3592		case OPERATION_NAME_READ_COPY_IMAGE_TO_BUFFER:
3593			return resourceDesc.type == RESOURCE_TYPE_IMAGE;
3594
3595		case OPERATION_NAME_WRITE_CLEAR_ATTACHMENTS:
3596			return resourceDesc.type == RESOURCE_TYPE_IMAGE && resourceDesc.imageType != VK_IMAGE_TYPE_3D;
3597
3598		case OPERATION_NAME_WRITE_BLIT_IMAGE:
3599		case OPERATION_NAME_READ_BLIT_IMAGE:
3600		case OPERATION_NAME_WRITE_IMAGE_VERTEX:
3601		case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_CONTROL:
3602		case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_EVALUATION:
3603		case OPERATION_NAME_WRITE_IMAGE_GEOMETRY:
3604		case OPERATION_NAME_WRITE_IMAGE_FRAGMENT:
3605		case OPERATION_NAME_WRITE_IMAGE_COMPUTE:
3606		case OPERATION_NAME_WRITE_IMAGE_COMPUTE_INDIRECT:
3607		case OPERATION_NAME_READ_IMAGE_VERTEX:
3608		case OPERATION_NAME_READ_IMAGE_TESSELLATION_CONTROL:
3609		case OPERATION_NAME_READ_IMAGE_TESSELLATION_EVALUATION:
3610		case OPERATION_NAME_READ_IMAGE_GEOMETRY:
3611		case OPERATION_NAME_READ_IMAGE_FRAGMENT:
3612		case OPERATION_NAME_READ_IMAGE_COMPUTE:
3613		case OPERATION_NAME_READ_IMAGE_COMPUTE_INDIRECT:
3614			return resourceDesc.type == RESOURCE_TYPE_IMAGE && resourceDesc.imageAspect == VK_IMAGE_ASPECT_COLOR_BIT;
3615
3616		case OPERATION_NAME_READ_UBO_VERTEX:
3617		case OPERATION_NAME_READ_UBO_TESSELLATION_CONTROL:
3618		case OPERATION_NAME_READ_UBO_TESSELLATION_EVALUATION:
3619		case OPERATION_NAME_READ_UBO_GEOMETRY:
3620		case OPERATION_NAME_READ_UBO_FRAGMENT:
3621		case OPERATION_NAME_READ_UBO_COMPUTE:
3622		case OPERATION_NAME_READ_UBO_COMPUTE_INDIRECT:
3623			return resourceDesc.type == RESOURCE_TYPE_BUFFER && resourceDesc.size.x() <= MAX_UBO_RANGE;
3624
3625		case OPERATION_NAME_WRITE_CLEAR_COLOR_IMAGE:
3626			return resourceDesc.type == RESOURCE_TYPE_IMAGE && resourceDesc.imageAspect == VK_IMAGE_ASPECT_COLOR_BIT;
3627
3628		case OPERATION_NAME_WRITE_CLEAR_DEPTH_STENCIL_IMAGE:
3629			return resourceDesc.type == RESOURCE_TYPE_IMAGE && (resourceDesc.imageAspect & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) != 0;
3630
3631		case OPERATION_NAME_WRITE_DRAW:
3632		case OPERATION_NAME_WRITE_DRAW_INDEXED:
3633		case OPERATION_NAME_WRITE_DRAW_INDIRECT:
3634		case OPERATION_NAME_WRITE_DRAW_INDEXED_INDIRECT:
3635			return resourceDesc.type == RESOURCE_TYPE_IMAGE && resourceDesc.imageType == VK_IMAGE_TYPE_2D
3636				&& (resourceDesc.imageAspect & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) == 0;
3637
3638		default:
3639			DE_ASSERT(0);
3640			return false;
3641	}
3642}
3643
3644std::string getOperationName (const OperationName opName)
3645{
3646	switch (opName)
3647	{
3648		case OPERATION_NAME_WRITE_FILL_BUFFER:						return "write_fill_buffer";
3649		case OPERATION_NAME_WRITE_UPDATE_BUFFER:					return "write_update_buffer";
3650		case OPERATION_NAME_WRITE_COPY_BUFFER:						return "write_copy_buffer";
3651		case OPERATION_NAME_WRITE_COPY_BUFFER_TO_IMAGE:				return "write_copy_buffer_to_image";
3652		case OPERATION_NAME_WRITE_COPY_IMAGE_TO_BUFFER:				return "write_copy_image_to_buffer";
3653		case OPERATION_NAME_WRITE_COPY_IMAGE:						return "write_copy_image";
3654		case OPERATION_NAME_WRITE_BLIT_IMAGE:						return "write_blit_image";
3655		case OPERATION_NAME_WRITE_SSBO_VERTEX:						return "write_ssbo_vertex";
3656		case OPERATION_NAME_WRITE_SSBO_TESSELLATION_CONTROL:		return "write_ssbo_tess_control";
3657		case OPERATION_NAME_WRITE_SSBO_TESSELLATION_EVALUATION:		return "write_ssbo_tess_eval";
3658		case OPERATION_NAME_WRITE_SSBO_GEOMETRY:					return "write_ssbo_geometry";
3659		case OPERATION_NAME_WRITE_SSBO_FRAGMENT:					return "write_ssbo_fragment";
3660		case OPERATION_NAME_WRITE_SSBO_COMPUTE:						return "write_ssbo_compute";
3661		case OPERATION_NAME_WRITE_SSBO_COMPUTE_INDIRECT:			return "write_ssbo_compute_indirect";
3662		case OPERATION_NAME_WRITE_IMAGE_VERTEX:						return "write_image_vertex";
3663		case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_CONTROL:		return "write_image_tess_control";
3664		case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_EVALUATION:	return "write_image_tess_eval";
3665		case OPERATION_NAME_WRITE_IMAGE_GEOMETRY:					return "write_image_geometry";
3666		case OPERATION_NAME_WRITE_IMAGE_FRAGMENT:					return "write_image_fragment";
3667		case OPERATION_NAME_WRITE_IMAGE_COMPUTE:					return "write_image_compute";
3668		case OPERATION_NAME_WRITE_IMAGE_COMPUTE_INDIRECT:			return "write_image_compute_indirect";
3669		case OPERATION_NAME_WRITE_CLEAR_COLOR_IMAGE:				return "write_clear_color_image";
3670		case OPERATION_NAME_WRITE_CLEAR_DEPTH_STENCIL_IMAGE:		return "write_clear_depth_stencil_image";
3671		case OPERATION_NAME_WRITE_DRAW:								return "write_draw";
3672		case OPERATION_NAME_WRITE_DRAW_INDEXED:						return "write_draw_indexed";
3673		case OPERATION_NAME_WRITE_DRAW_INDIRECT:					return "write_draw_indirect";
3674		case OPERATION_NAME_WRITE_DRAW_INDEXED_INDIRECT:			return "write_draw_indexed_indirect";
3675		case OPERATION_NAME_WRITE_CLEAR_ATTACHMENTS:				return "write_clear_attachments";
3676		case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DRAW:				return "write_indirect_buffer_draw";
3677		case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DRAW_INDEXED:		return "write_indirect_buffer_draw_indexed";
3678		case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DISPATCH:			return "write_indirect_buffer_dispatch";
3679
3680		case OPERATION_NAME_READ_COPY_BUFFER:						return "read_copy_buffer";
3681		case OPERATION_NAME_READ_COPY_BUFFER_TO_IMAGE:				return "read_copy_buffer_to_image";
3682		case OPERATION_NAME_READ_COPY_IMAGE_TO_BUFFER:				return "read_copy_image_to_buffer";
3683		case OPERATION_NAME_READ_COPY_IMAGE:						return "read_copy_image";
3684		case OPERATION_NAME_READ_BLIT_IMAGE:						return "read_blit_image";
3685		case OPERATION_NAME_READ_UBO_VERTEX:						return "read_ubo_vertex";
3686		case OPERATION_NAME_READ_UBO_TESSELLATION_CONTROL:			return "read_ubo_tess_control";
3687		case OPERATION_NAME_READ_UBO_TESSELLATION_EVALUATION:		return "read_ubo_tess_eval";
3688		case OPERATION_NAME_READ_UBO_GEOMETRY:						return "read_ubo_geometry";
3689		case OPERATION_NAME_READ_UBO_FRAGMENT:						return "read_ubo_fragment";
3690		case OPERATION_NAME_READ_UBO_COMPUTE:						return "read_ubo_compute";
3691		case OPERATION_NAME_READ_UBO_COMPUTE_INDIRECT:				return "read_ubo_compute_indirect";
3692		case OPERATION_NAME_READ_SSBO_VERTEX:						return "read_ssbo_vertex";
3693		case OPERATION_NAME_READ_SSBO_TESSELLATION_CONTROL:			return "read_ssbo_tess_control";
3694		case OPERATION_NAME_READ_SSBO_TESSELLATION_EVALUATION:		return "read_ssbo_tess_eval";
3695		case OPERATION_NAME_READ_SSBO_GEOMETRY:						return "read_ssbo_geometry";
3696		case OPERATION_NAME_READ_SSBO_FRAGMENT:						return "read_ssbo_fragment";
3697		case OPERATION_NAME_READ_SSBO_COMPUTE:						return "read_ssbo_compute";
3698		case OPERATION_NAME_READ_SSBO_COMPUTE_INDIRECT:				return "read_ssbo_compute_indirect";
3699		case OPERATION_NAME_READ_IMAGE_VERTEX:						return "read_image_vertex";
3700		case OPERATION_NAME_READ_IMAGE_TESSELLATION_CONTROL:		return "read_image_tess_control";
3701		case OPERATION_NAME_READ_IMAGE_TESSELLATION_EVALUATION:		return "read_image_tess_eval";
3702		case OPERATION_NAME_READ_IMAGE_GEOMETRY:					return "read_image_geometry";
3703		case OPERATION_NAME_READ_IMAGE_FRAGMENT:					return "read_image_fragment";
3704		case OPERATION_NAME_READ_IMAGE_COMPUTE:						return "read_image_compute";
3705		case OPERATION_NAME_READ_IMAGE_COMPUTE_INDIRECT:			return "read_image_compute_indirect";
3706		case OPERATION_NAME_READ_INDIRECT_BUFFER_DRAW:				return "read_indirect_buffer_draw";
3707		case OPERATION_NAME_READ_INDIRECT_BUFFER_DRAW_INDEXED:		return "read_indirect_buffer_draw_indexed";
3708		case OPERATION_NAME_READ_INDIRECT_BUFFER_DISPATCH:			return "read_indirect_buffer_dispatch";
3709		case OPERATION_NAME_READ_VERTEX_INPUT:						return "read_vertex_input";
3710
3711		default:
3712			DE_ASSERT(0);
3713			return "";
3714	}
3715}
3716
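//! \note A rough usage sketch (illustrative only; the real call sites live in the synchronization test groups):
//! \code
//!     de::MovePtr<OperationSupport> support   = makeOperationSupport(opName, resourceDesc);
//!     de::MovePtr<Operation>        operation = support->build(operationContext, resource);
//!     operation->recordCommands(cmdBuffer);
//!     // getSyncInfo() and getData() are then used to create barriers and to verify the result.
//! \endcode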
3717de::MovePtr<OperationSupport> makeOperationSupport (const OperationName opName, const ResourceDescription& resourceDesc)
3718{
3719	switch (opName)
3720	{
3721		case OPERATION_NAME_WRITE_FILL_BUFFER:						return de::MovePtr<OperationSupport>(new FillUpdateBuffer	::Support		(resourceDesc, FillUpdateBuffer::BUFFER_OP_FILL));
3722		case OPERATION_NAME_WRITE_UPDATE_BUFFER:					return de::MovePtr<OperationSupport>(new FillUpdateBuffer	::Support		(resourceDesc, FillUpdateBuffer::BUFFER_OP_UPDATE));
3723		case OPERATION_NAME_WRITE_COPY_BUFFER:						return de::MovePtr<OperationSupport>(new CopyBuffer			::Support		(resourceDesc, ACCESS_MODE_WRITE));
3724		case OPERATION_NAME_WRITE_COPY_BUFFER_TO_IMAGE:				return de::MovePtr<OperationSupport>(new CopyBufferToImage	::Support		(resourceDesc, ACCESS_MODE_WRITE));
3725		case OPERATION_NAME_WRITE_COPY_IMAGE_TO_BUFFER:				return de::MovePtr<OperationSupport>(new CopyImageToBuffer	::Support		(resourceDesc, ACCESS_MODE_WRITE));
3726		case OPERATION_NAME_WRITE_COPY_IMAGE:						return de::MovePtr<OperationSupport>(new CopyBlitImage		::Support		(resourceDesc, CopyBlitImage::TYPE_COPY, ACCESS_MODE_WRITE));
3727		case OPERATION_NAME_WRITE_BLIT_IMAGE:						return de::MovePtr<OperationSupport>(new CopyBlitImage		::Support		(resourceDesc, CopyBlitImage::TYPE_BLIT, ACCESS_MODE_WRITE));
3728		case OPERATION_NAME_WRITE_SSBO_VERTEX:						return de::MovePtr<OperationSupport>(new ShaderAccess		::BufferSupport	(resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, VK_SHADER_STAGE_VERTEX_BIT));
3729		case OPERATION_NAME_WRITE_SSBO_TESSELLATION_CONTROL:		return de::MovePtr<OperationSupport>(new ShaderAccess		::BufferSupport	(resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT));
3730		case OPERATION_NAME_WRITE_SSBO_TESSELLATION_EVALUATION:		return de::MovePtr<OperationSupport>(new ShaderAccess		::BufferSupport	(resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
3731		case OPERATION_NAME_WRITE_SSBO_GEOMETRY:					return de::MovePtr<OperationSupport>(new ShaderAccess		::BufferSupport	(resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, VK_SHADER_STAGE_GEOMETRY_BIT));
3732		case OPERATION_NAME_WRITE_SSBO_FRAGMENT:					return de::MovePtr<OperationSupport>(new ShaderAccess		::BufferSupport	(resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, VK_SHADER_STAGE_FRAGMENT_BIT));
3733		case OPERATION_NAME_WRITE_SSBO_COMPUTE:						return de::MovePtr<OperationSupport>(new ShaderAccess		::BufferSupport	(resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, VK_SHADER_STAGE_COMPUTE_BIT));
3734		case OPERATION_NAME_WRITE_SSBO_COMPUTE_INDIRECT:			return de::MovePtr<OperationSupport>(new ShaderAccess		::BufferSupport	(resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, VK_SHADER_STAGE_COMPUTE_BIT, ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
3735		case OPERATION_NAME_WRITE_IMAGE_VERTEX:						return de::MovePtr<OperationSupport>(new ShaderAccess		::ImageSupport	(resourceDesc, ACCESS_MODE_WRITE, VK_SHADER_STAGE_VERTEX_BIT));
3736		case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_CONTROL:		return de::MovePtr<OperationSupport>(new ShaderAccess		::ImageSupport	(resourceDesc, ACCESS_MODE_WRITE, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT));
3737		case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_EVALUATION:	return de::MovePtr<OperationSupport>(new ShaderAccess		::ImageSupport	(resourceDesc, ACCESS_MODE_WRITE, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
3738		case OPERATION_NAME_WRITE_IMAGE_GEOMETRY:					return de::MovePtr<OperationSupport>(new ShaderAccess		::ImageSupport	(resourceDesc, ACCESS_MODE_WRITE, VK_SHADER_STAGE_GEOMETRY_BIT));
3739		case OPERATION_NAME_WRITE_IMAGE_FRAGMENT:					return de::MovePtr<OperationSupport>(new ShaderAccess		::ImageSupport	(resourceDesc, ACCESS_MODE_WRITE, VK_SHADER_STAGE_FRAGMENT_BIT));
3740		case OPERATION_NAME_WRITE_IMAGE_COMPUTE:					return de::MovePtr<OperationSupport>(new ShaderAccess		::ImageSupport	(resourceDesc, ACCESS_MODE_WRITE, VK_SHADER_STAGE_COMPUTE_BIT));
3741		case OPERATION_NAME_WRITE_IMAGE_COMPUTE_INDIRECT:			return de::MovePtr<OperationSupport>(new ShaderAccess		::ImageSupport	(resourceDesc, ACCESS_MODE_WRITE, VK_SHADER_STAGE_COMPUTE_BIT, ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
3742		case OPERATION_NAME_WRITE_CLEAR_COLOR_IMAGE:				return de::MovePtr<OperationSupport>(new ClearImage			::Support		(resourceDesc, ClearImage::CLEAR_MODE_COLOR));
3743		case OPERATION_NAME_WRITE_CLEAR_DEPTH_STENCIL_IMAGE:		return de::MovePtr<OperationSupport>(new ClearImage			::Support		(resourceDesc, ClearImage::CLEAR_MODE_DEPTH_STENCIL));
3744		case OPERATION_NAME_WRITE_DRAW:								return de::MovePtr<OperationSupport>(new Draw				::Support		(resourceDesc, Draw::DRAW_CALL_DRAW));
3745		case OPERATION_NAME_WRITE_DRAW_INDEXED:						return de::MovePtr<OperationSupport>(new Draw				::Support		(resourceDesc, Draw::DRAW_CALL_DRAW_INDEXED));
3746		case OPERATION_NAME_WRITE_DRAW_INDIRECT:					return de::MovePtr<OperationSupport>(new Draw				::Support		(resourceDesc, Draw::DRAW_CALL_DRAW_INDIRECT));
3747		case OPERATION_NAME_WRITE_DRAW_INDEXED_INDIRECT:			return de::MovePtr<OperationSupport>(new Draw				::Support		(resourceDesc, Draw::DRAW_CALL_DRAW_INDEXED_INDIRECT));
3748		case OPERATION_NAME_WRITE_CLEAR_ATTACHMENTS:				return de::MovePtr<OperationSupport>(new ClearAttachments	::Support		(resourceDesc));
3749		case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DRAW:				return de::MovePtr<OperationSupport>(new IndirectBuffer		::WriteSupport	(resourceDesc));
3750		case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DRAW_INDEXED:		return de::MovePtr<OperationSupport>(new IndirectBuffer		::WriteSupport	(resourceDesc));
3751		case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DISPATCH:			return de::MovePtr<OperationSupport>(new IndirectBuffer		::WriteSupport	(resourceDesc));
3752
3753		case OPERATION_NAME_READ_COPY_BUFFER:						return de::MovePtr<OperationSupport>(new CopyBuffer			::Support		(resourceDesc, ACCESS_MODE_READ));
3754		case OPERATION_NAME_READ_COPY_BUFFER_TO_IMAGE:				return de::MovePtr<OperationSupport>(new CopyBufferToImage	::Support		(resourceDesc, ACCESS_MODE_READ));
3755		case OPERATION_NAME_READ_COPY_IMAGE_TO_BUFFER:				return de::MovePtr<OperationSupport>(new CopyImageToBuffer	::Support		(resourceDesc, ACCESS_MODE_READ));
3756		case OPERATION_NAME_READ_COPY_IMAGE:						return de::MovePtr<OperationSupport>(new CopyBlitImage		::Support		(resourceDesc, CopyBlitImage::TYPE_COPY, ACCESS_MODE_READ));
3757		case OPERATION_NAME_READ_BLIT_IMAGE:						return de::MovePtr<OperationSupport>(new CopyBlitImage		::Support		(resourceDesc, CopyBlitImage::TYPE_BLIT, ACCESS_MODE_READ));
3758		case OPERATION_NAME_READ_UBO_VERTEX:						return de::MovePtr<OperationSupport>(new ShaderAccess		::BufferSupport	(resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, VK_SHADER_STAGE_VERTEX_BIT));
3759		case OPERATION_NAME_READ_UBO_TESSELLATION_CONTROL:			return de::MovePtr<OperationSupport>(new ShaderAccess		::BufferSupport	(resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT));
3760		case OPERATION_NAME_READ_UBO_TESSELLATION_EVALUATION:		return de::MovePtr<OperationSupport>(new ShaderAccess		::BufferSupport	(resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
3761		case OPERATION_NAME_READ_UBO_GEOMETRY:						return de::MovePtr<OperationSupport>(new ShaderAccess		::BufferSupport	(resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, VK_SHADER_STAGE_GEOMETRY_BIT));
3762		case OPERATION_NAME_READ_UBO_FRAGMENT:						return de::MovePtr<OperationSupport>(new ShaderAccess		::BufferSupport	(resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, VK_SHADER_STAGE_FRAGMENT_BIT));
3763		case OPERATION_NAME_READ_UBO_COMPUTE:						return de::MovePtr<OperationSupport>(new ShaderAccess		::BufferSupport	(resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, VK_SHADER_STAGE_COMPUTE_BIT));
3764		case OPERATION_NAME_READ_UBO_COMPUTE_INDIRECT:				return de::MovePtr<OperationSupport>(new ShaderAccess		::BufferSupport	(resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, VK_SHADER_STAGE_COMPUTE_BIT, ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
3765		case OPERATION_NAME_READ_SSBO_VERTEX:						return de::MovePtr<OperationSupport>(new ShaderAccess		::BufferSupport	(resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, VK_SHADER_STAGE_VERTEX_BIT));
3766		case OPERATION_NAME_READ_SSBO_TESSELLATION_CONTROL:			return de::MovePtr<OperationSupport>(new ShaderAccess		::BufferSupport	(resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT));
3767		case OPERATION_NAME_READ_SSBO_TESSELLATION_EVALUATION:		return de::MovePtr<OperationSupport>(new ShaderAccess		::BufferSupport	(resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
3768		case OPERATION_NAME_READ_SSBO_GEOMETRY:						return de::MovePtr<OperationSupport>(new ShaderAccess		::BufferSupport	(resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, VK_SHADER_STAGE_GEOMETRY_BIT));
3769		case OPERATION_NAME_READ_SSBO_FRAGMENT:						return de::MovePtr<OperationSupport>(new ShaderAccess		::BufferSupport	(resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, VK_SHADER_STAGE_FRAGMENT_BIT));
3770		case OPERATION_NAME_READ_SSBO_COMPUTE:						return de::MovePtr<OperationSupport>(new ShaderAccess		::BufferSupport	(resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, VK_SHADER_STAGE_COMPUTE_BIT));
3771		case OPERATION_NAME_READ_SSBO_COMPUTE_INDIRECT:				return de::MovePtr<OperationSupport>(new ShaderAccess		::BufferSupport	(resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, VK_SHADER_STAGE_COMPUTE_BIT, ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
3772		case OPERATION_NAME_READ_IMAGE_VERTEX:						return de::MovePtr<OperationSupport>(new ShaderAccess		::ImageSupport	(resourceDesc, ACCESS_MODE_READ, VK_SHADER_STAGE_VERTEX_BIT));
3773		case OPERATION_NAME_READ_IMAGE_TESSELLATION_CONTROL:		return de::MovePtr<OperationSupport>(new ShaderAccess		::ImageSupport	(resourceDesc, ACCESS_MODE_READ, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT));
3774		case OPERATION_NAME_READ_IMAGE_TESSELLATION_EVALUATION:		return de::MovePtr<OperationSupport>(new ShaderAccess		::ImageSupport	(resourceDesc, ACCESS_MODE_READ, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
3775		case OPERATION_NAME_READ_IMAGE_GEOMETRY:					return de::MovePtr<OperationSupport>(new ShaderAccess		::ImageSupport	(resourceDesc, ACCESS_MODE_READ, VK_SHADER_STAGE_GEOMETRY_BIT));
3776		case OPERATION_NAME_READ_IMAGE_FRAGMENT:					return de::MovePtr<OperationSupport>(new ShaderAccess		::ImageSupport	(resourceDesc, ACCESS_MODE_READ, VK_SHADER_STAGE_FRAGMENT_BIT));
3777		case OPERATION_NAME_READ_IMAGE_COMPUTE:						return de::MovePtr<OperationSupport>(new ShaderAccess		::ImageSupport	(resourceDesc, ACCESS_MODE_READ, VK_SHADER_STAGE_COMPUTE_BIT));
3778		case OPERATION_NAME_READ_IMAGE_COMPUTE_INDIRECT:			return de::MovePtr<OperationSupport>(new ShaderAccess		::ImageSupport	(resourceDesc, ACCESS_MODE_READ, VK_SHADER_STAGE_COMPUTE_BIT, ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
3779		case OPERATION_NAME_READ_INDIRECT_BUFFER_DRAW:				return de::MovePtr<OperationSupport>(new IndirectBuffer		::ReadSupport	(resourceDesc));
3780		case OPERATION_NAME_READ_INDIRECT_BUFFER_DRAW_INDEXED:		return de::MovePtr<OperationSupport>(new IndirectBuffer		::ReadSupport	(resourceDesc));
3781		case OPERATION_NAME_READ_INDIRECT_BUFFER_DISPATCH:			return de::MovePtr<OperationSupport>(new IndirectBuffer		::ReadSupport	(resourceDesc));
3782		case OPERATION_NAME_READ_VERTEX_INPUT:						return de::MovePtr<OperationSupport>(new VertexInput		::Support		(resourceDesc));
3783
3784		default:
3785			DE_ASSERT(0);
3786			return de::MovePtr<OperationSupport>();
3787	}
3788}
3789
3790} // synchronization
3791} // vkt
3792