1/*-------------------------------------------------------------------------
2 * Vulkan Conformance Tests
3 * ------------------------
4 *
5 * Copyright (c) 2015 Google Inc.
6 *
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 *
11 *      http://www.apache.org/licenses/LICENSE-2.0
12 *
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
18 *
19 *//*!
20 * \file
21 * \brief Pipeline barrier tests
22 *//*--------------------------------------------------------------------*/
23
24#include "vktMemoryPipelineBarrierTests.hpp"
25
26#include "vktTestCaseUtil.hpp"
27
28#include "vkDefs.hpp"
29#include "vkPlatform.hpp"
30#include "vkRefUtil.hpp"
31#include "vkQueryUtil.hpp"
32#include "vkMemUtil.hpp"
33#include "vkTypeUtil.hpp"
34#include "vkPrograms.hpp"
35
36#include "tcuMaybe.hpp"
37#include "tcuTextureUtil.hpp"
38#include "tcuTestLog.hpp"
39#include "tcuResultCollector.hpp"
40#include "tcuTexture.hpp"
41#include "tcuImageCompare.hpp"
42
43#include "deUniquePtr.hpp"
44#include "deStringUtil.hpp"
45#include "deRandom.hpp"
46
47#include "deMemory.h"
48#include "deMath.h"
49
50#include <map>
51#include <set>
52#include <sstream>
53#include <string>
54#include <vector>
55
56// \todo [2016-03-09 mika] Check bufferImageGranularity
57
58using tcu::TestLog;
59using tcu::Maybe;
60
61using std::string;
62using std::vector;
63using std::map;
64using std::set;
65using std::pair;
66
67using tcu::IVec2;
68using tcu::UVec4;
69using tcu::Vec4;
70using tcu::ConstPixelBufferAccess;
71using tcu::PixelBufferAccess;
72using tcu::TextureFormat;
73using tcu::TextureLevel;
74
75namespace vkt
76{
77namespace memory
78{
79namespace
80{
// Mask of every core pipeline stage exercised by these tests. Used where a
// barrier or submission must cover all possible producer/consumer stages.
enum
{
	ALL_PIPELINE_STAGES = vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT
						| vk::VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT
						| vk::VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT
						| vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT
						| vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT
						| vk::VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT
						| vk::VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT
						| vk::VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT
						| vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT
						| vk::VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT
						| vk::VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT
						| vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT
						| vk::VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT
						| vk::VK_PIPELINE_STAGE_TRANSFER_BIT
						| vk::VK_PIPELINE_STAGE_HOST_BIT
};
99
// Mask of every core access type exercised by these tests. Counterpart of
// ALL_PIPELINE_STAGES for VkAccessFlags.
enum
{
	ALL_ACCESSES = vk::VK_ACCESS_INDIRECT_COMMAND_READ_BIT
				 | vk::VK_ACCESS_INDEX_READ_BIT
				 | vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT
				 | vk::VK_ACCESS_UNIFORM_READ_BIT
				 | vk::VK_ACCESS_INPUT_ATTACHMENT_READ_BIT
				 | vk::VK_ACCESS_SHADER_READ_BIT
				 | vk::VK_ACCESS_SHADER_WRITE_BIT
				 | vk::VK_ACCESS_COLOR_ATTACHMENT_READ_BIT
				 | vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT
				 | vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT
				 | vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT
				 | vk::VK_ACCESS_TRANSFER_READ_BIT
				 | vk::VK_ACCESS_TRANSFER_WRITE_BIT
				 | vk::VK_ACCESS_HOST_READ_BIT
				 | vk::VK_ACCESS_HOST_WRITE_BIT
				 | vk::VK_ACCESS_MEMORY_READ_BIT
				 | vk::VK_ACCESS_MEMORY_WRITE_BIT
};
120
// Bitmask describing how a test uses a memory object. Values are single
// bits and may be combined with the operator|/operator& overloads below;
// they are translated to Vulkan usage/stage/access flags by the
// usageTo*Flags() helpers.
enum Usage
{
	// Mapped host read and write
	USAGE_HOST_READ = (0x1u<<0),
	USAGE_HOST_WRITE = (0x1u<<1),

	// Copy and other transfer operations
	USAGE_TRANSFER_SRC = (0x1u<<2),
	USAGE_TRANSFER_DST = (0x1u<<3),

	// Buffer usage flags
	USAGE_INDEX_BUFFER = (0x1u<<4),
	USAGE_VERTEX_BUFFER = (0x1u<<5),

	USAGE_UNIFORM_BUFFER = (0x1u<<6),
	USAGE_STORAGE_BUFFER = (0x1u<<7),

	USAGE_UNIFORM_TEXEL_BUFFER = (0x1u<<8),
	USAGE_STORAGE_TEXEL_BUFFER = (0x1u<<9),

	// \todo [2016-03-09 mika] This is probably almost impossible to do
	USAGE_INDIRECT_BUFFER = (0x1u<<10),

	// Texture usage flags
	USAGE_TEXTURE_SAMPLED = (0x1u<<11),
	USAGE_TEXTURE_STORAGE = (0x1u<<12),
	USAGE_COLOR_ATTACHMENT = (0x1u<<13),
	USAGE_INPUT_ATTACHMENT = (0x1u<<14),
	USAGE_DEPTH_STENCIL_ATTACHMENT = (0x1u<<15),
};
151
152bool supportsDeviceBufferWrites (Usage usage)
153{
154	if (usage & USAGE_TRANSFER_DST)
155		return true;
156
157	if (usage & USAGE_STORAGE_BUFFER)
158		return true;
159
160	if (usage & USAGE_STORAGE_TEXEL_BUFFER)
161		return true;
162
163	return false;
164}
165
166bool supportsDeviceImageWrites (Usage usage)
167{
168	if (usage & USAGE_TRANSFER_DST)
169		return true;
170
171	if (usage & USAGE_TEXTURE_STORAGE)
172		return true;
173
174	if (usage & USAGE_COLOR_ATTACHMENT)
175		return true;
176
177	return false;
178}
179
// Sequential access enums
// Zero-based indices corresponding one-to-one to the vk::VkAccessFlagBits
// bits, suitable for use as array indices.
enum Access
{
	ACCESS_INDIRECT_COMMAND_READ_BIT = 0,
	ACCESS_INDEX_READ_BIT,
	ACCESS_VERTEX_ATTRIBUTE_READ_BIT,
	ACCESS_UNIFORM_READ_BIT,
	ACCESS_INPUT_ATTACHMENT_READ_BIT,
	ACCESS_SHADER_READ_BIT,
	ACCESS_SHADER_WRITE_BIT,
	ACCESS_COLOR_ATTACHMENT_READ_BIT,
	ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
	ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
	ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
	ACCESS_TRANSFER_READ_BIT,
	ACCESS_TRANSFER_WRITE_BIT,
	ACCESS_HOST_READ_BIT,
	ACCESS_HOST_WRITE_BIT,
	ACCESS_MEMORY_READ_BIT,
	ACCESS_MEMORY_WRITE_BIT,

	ACCESS_LAST
};
203
// Sequential stage enums
// Zero-based indices corresponding one-to-one to the
// vk::VkPipelineStageFlagBits bits, suitable for use as array indices (see
// pipelineStageFlagToPipelineStage()).
enum PipelineStage
{
	PIPELINESTAGE_TOP_OF_PIPE_BIT = 0,
	PIPELINESTAGE_BOTTOM_OF_PIPE_BIT,
	PIPELINESTAGE_DRAW_INDIRECT_BIT,
	PIPELINESTAGE_VERTEX_INPUT_BIT,
	PIPELINESTAGE_VERTEX_SHADER_BIT,
	PIPELINESTAGE_TESSELLATION_CONTROL_SHADER_BIT,
	PIPELINESTAGE_TESSELLATION_EVALUATION_SHADER_BIT,
	PIPELINESTAGE_GEOMETRY_SHADER_BIT,
	PIPELINESTAGE_FRAGMENT_SHADER_BIT,
	PIPELINESTAGE_EARLY_FRAGMENT_TESTS_BIT,
	PIPELINESTAGE_LATE_FRAGMENT_TESTS_BIT,
	PIPELINESTAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
	PIPELINESTAGE_COMPUTE_SHADER_BIT,
	PIPELINESTAGE_TRANSFER_BIT,
	PIPELINESTAGE_HOST_BIT,

	PIPELINESTAGE_LAST
};
225
// Map a single vk::VkPipelineStageFlagBits bit to the corresponding
// sequential PipelineStage index. 'flags' must contain exactly one of the
// core stage bits; anything else trips DE_FATAL and returns
// PIPELINESTAGE_LAST.
PipelineStage pipelineStageFlagToPipelineStage (vk::VkPipelineStageFlagBits flags)
{
	switch (flags)
	{
		case vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT:						return PIPELINESTAGE_TOP_OF_PIPE_BIT;
		case vk::VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT:					return PIPELINESTAGE_BOTTOM_OF_PIPE_BIT;
		case vk::VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT:					return PIPELINESTAGE_DRAW_INDIRECT_BIT;
		case vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT:					return PIPELINESTAGE_VERTEX_INPUT_BIT;
		case vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT:					return PIPELINESTAGE_VERTEX_SHADER_BIT;
		case vk::VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT:		return PIPELINESTAGE_TESSELLATION_CONTROL_SHADER_BIT;
		case vk::VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT:	return PIPELINESTAGE_TESSELLATION_EVALUATION_SHADER_BIT;
		case vk::VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT:					return PIPELINESTAGE_GEOMETRY_SHADER_BIT;
		case vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT:					return PIPELINESTAGE_FRAGMENT_SHADER_BIT;
		case vk::VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT:			return PIPELINESTAGE_EARLY_FRAGMENT_TESTS_BIT;
		case vk::VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT:				return PIPELINESTAGE_LATE_FRAGMENT_TESTS_BIT;
		case vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT:			return PIPELINESTAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
		case vk::VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT:					return PIPELINESTAGE_COMPUTE_SHADER_BIT;
		case vk::VK_PIPELINE_STAGE_TRANSFER_BIT:						return PIPELINESTAGE_TRANSFER_BIT;
		case vk::VK_PIPELINE_STAGE_HOST_BIT:							return PIPELINESTAGE_HOST_BIT;

		default:
			DE_FATAL("Unknown pipeline stage flags");
			return PIPELINESTAGE_LAST;
	}
}
251
252Usage operator| (Usage a, Usage b)
253{
254	return (Usage)((deUint32)a | (deUint32)b);
255}
256
257Usage operator& (Usage a, Usage b)
258{
259	return (Usage)((deUint32)a & (deUint32)b);
260}
261
262string usageToName (Usage usage)
263{
264	const struct
265	{
266		Usage				usage;
267		const char* const	name;
268	} usageNames[] =
269	{
270		{ USAGE_HOST_READ,					"host_read" },
271		{ USAGE_HOST_WRITE,					"host_write" },
272
273		{ USAGE_TRANSFER_SRC,				"transfer_src" },
274		{ USAGE_TRANSFER_DST,				"transfer_dst" },
275
276		{ USAGE_INDEX_BUFFER,				"index_buffer" },
277		{ USAGE_VERTEX_BUFFER,				"vertex_buffer" },
278		{ USAGE_UNIFORM_BUFFER,				"uniform_buffer" },
279		{ USAGE_STORAGE_BUFFER,				"storage_buffer" },
280		{ USAGE_UNIFORM_TEXEL_BUFFER,		"uniform_texel_buffer" },
281		{ USAGE_STORAGE_TEXEL_BUFFER,		"storage_texel_buffer" },
282		{ USAGE_INDIRECT_BUFFER,			"indirect_buffer" },
283		{ USAGE_TEXTURE_SAMPLED,			"sampled_texture" },
284		{ USAGE_TEXTURE_STORAGE,			"texture_storage" },
285		{ USAGE_COLOR_ATTACHMENT,			"color_attachment" },
286		{ USAGE_INPUT_ATTACHMENT,			"input_attachment" },
287		{ USAGE_DEPTH_STENCIL_ATTACHMENT,	"depth_stencil_attachment" },
288	};
289
290	std::ostringstream	stream;
291	bool				first = true;
292
293	for (size_t usageNdx = 0; usageNdx < DE_LENGTH_OF_ARRAY(usageNames); usageNdx++)
294	{
295		if (usage & usageNames[usageNdx].usage)
296		{
297			if (!first)
298				stream << "_";
299			else
300				first = false;
301
302			stream << usageNames[usageNdx].name;
303		}
304	}
305
306	return stream.str();
307}
308
309vk::VkBufferUsageFlags usageToBufferUsageFlags (Usage usage)
310{
311	vk::VkBufferUsageFlags flags = 0;
312
313	if (usage & USAGE_TRANSFER_SRC)
314		flags |= vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
315
316	if (usage & USAGE_TRANSFER_DST)
317		flags |= vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT;
318
319	if (usage & USAGE_INDEX_BUFFER)
320		flags |= vk::VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
321
322	if (usage & USAGE_VERTEX_BUFFER)
323		flags |= vk::VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
324
325	if (usage & USAGE_INDIRECT_BUFFER)
326		flags |= vk::VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
327
328	if (usage & USAGE_UNIFORM_BUFFER)
329		flags |= vk::VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
330
331	if (usage & USAGE_STORAGE_BUFFER)
332		flags |= vk::VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
333
334	if (usage & USAGE_UNIFORM_TEXEL_BUFFER)
335		flags |= vk::VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT;
336
337	if (usage & USAGE_STORAGE_TEXEL_BUFFER)
338		flags |= vk::VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT;
339
340	return flags;
341}
342
343vk::VkImageUsageFlags usageToImageUsageFlags (Usage usage)
344{
345	vk::VkImageUsageFlags flags = 0;
346
347	if (usage & USAGE_TRANSFER_SRC)
348		flags |= vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
349
350	if (usage & USAGE_TRANSFER_DST)
351		flags |= vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT;
352
353	if (usage & USAGE_TEXTURE_SAMPLED)
354		flags |= vk::VK_IMAGE_USAGE_SAMPLED_BIT;
355
356	if (usage & USAGE_TEXTURE_STORAGE)
357		flags |= vk::VK_IMAGE_USAGE_STORAGE_BIT;
358
359	if (usage & USAGE_COLOR_ATTACHMENT)
360		flags |= vk::VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
361
362	if (usage & USAGE_INPUT_ATTACHMENT)
363		flags |= vk::VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
364
365	if (usage & USAGE_DEPTH_STENCIL_ATTACHMENT)
366		flags |= vk::VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
367
368	return flags;
369}
370
// Compute the set of pipeline stages that can touch a resource with the
// given usage bits. Used to build barrier src/dst stage masks.
vk::VkPipelineStageFlags usageToStageFlags (Usage usage)
{
	vk::VkPipelineStageFlags flags = 0;

	if (usage & (USAGE_HOST_READ|USAGE_HOST_WRITE))
		flags |= vk::VK_PIPELINE_STAGE_HOST_BIT;

	if (usage & (USAGE_TRANSFER_SRC|USAGE_TRANSFER_DST))
		flags |= vk::VK_PIPELINE_STAGE_TRANSFER_BIT;

	if (usage & (USAGE_VERTEX_BUFFER|USAGE_INDEX_BUFFER))
		flags |= vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;

	if (usage & USAGE_INDIRECT_BUFFER)
		flags |= vk::VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT;

	// Any descriptor-accessible resource may be read/written by every
	// programmable stage.
	if (usage &
			(USAGE_UNIFORM_BUFFER
			| USAGE_STORAGE_BUFFER
			| USAGE_UNIFORM_TEXEL_BUFFER
			| USAGE_STORAGE_TEXEL_BUFFER
			| USAGE_TEXTURE_SAMPLED
			| USAGE_TEXTURE_STORAGE))
	{
		flags |= (vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT
				| vk::VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT
				| vk::VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT
				| vk::VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT
				| vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT
				| vk::VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT);
	}

	// Input attachments are only readable from the fragment shader.
	if (usage & USAGE_INPUT_ATTACHMENT)
		flags |= vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;

	if (usage & USAGE_COLOR_ATTACHMENT)
		flags |= vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;

	// Depth/stencil is accessed by both early and late fragment tests.
	if (usage & USAGE_DEPTH_STENCIL_ATTACHMENT)
	{
		flags |= vk::VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT
				| vk::VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
	}

	return flags;
}
417
// Compute the set of access types that can touch a resource with the given
// usage bits. Used to build barrier src/dst access masks.
vk::VkAccessFlags usageToAccessFlags (Usage usage)
{
	vk::VkAccessFlags flags = 0;

	if (usage & USAGE_HOST_READ)
		flags |= vk::VK_ACCESS_HOST_READ_BIT;

	if (usage & USAGE_HOST_WRITE)
		flags |= vk::VK_ACCESS_HOST_WRITE_BIT;

	if (usage & USAGE_TRANSFER_SRC)
		flags |= vk::VK_ACCESS_TRANSFER_READ_BIT;

	if (usage & USAGE_TRANSFER_DST)
		flags |= vk::VK_ACCESS_TRANSFER_WRITE_BIT;

	if (usage & USAGE_INDEX_BUFFER)
		flags |= vk::VK_ACCESS_INDEX_READ_BIT;

	if (usage & USAGE_VERTEX_BUFFER)
		flags |= vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;

	if (usage & (USAGE_UNIFORM_BUFFER | USAGE_UNIFORM_TEXEL_BUFFER))
		flags |= vk::VK_ACCESS_UNIFORM_READ_BIT;

	// Storage resources and textures are conservatively treated as both
	// shader-readable and shader-writable.
	if (usage & (USAGE_STORAGE_BUFFER
				| USAGE_STORAGE_TEXEL_BUFFER
				| USAGE_TEXTURE_SAMPLED
				| USAGE_TEXTURE_STORAGE))
		flags |= vk::VK_ACCESS_SHADER_READ_BIT | vk::VK_ACCESS_SHADER_WRITE_BIT;

	if (usage & USAGE_INDIRECT_BUFFER)
		flags |= vk::VK_ACCESS_INDIRECT_COMMAND_READ_BIT;

	if (usage & USAGE_COLOR_ATTACHMENT)
		flags |= vk::VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;

	if (usage & USAGE_INPUT_ATTACHMENT)
		flags |= vk::VK_ACCESS_INPUT_ATTACHMENT_READ_BIT;

	if (usage & USAGE_DEPTH_STENCIL_ATTACHMENT)
		flags |= vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT
			| vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;

	return flags;
}
464
// Parameters of a single pipeline barrier test case.
struct TestConfig
{
	Usage					usage;		// Combination of usages to exercise
	vk::VkDeviceSize		size;		// Size of the memory allocation in bytes
	vk::VkSharingMode		sharing;	// Sharing mode for created buffers/images
};
471
472vk::Move<vk::VkCommandBuffer> createCommandBuffer (const vk::DeviceInterface&	vkd,
473												   vk::VkDevice					device,
474												   vk::VkCommandPool			pool,
475												   vk::VkCommandBufferLevel		level)
476{
477	const vk::VkCommandBufferAllocateInfo bufferInfo =
478	{
479		vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
480		DE_NULL,
481
482		pool,
483		level,
484		1u
485	};
486
487	return vk::allocateCommandBuffer(vkd, device, &bufferInfo);
488}
489
490vk::Move<vk::VkCommandBuffer> createBeginCommandBuffer (const vk::DeviceInterface&	vkd,
491														vk::VkDevice				device,
492														vk::VkCommandPool			pool,
493														vk::VkCommandBufferLevel	level)
494{
495	const vk::VkCommandBufferInheritanceInfo	inheritInfo	=
496	{
497		vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
498		DE_NULL,
499		0,
500		0,
501		0,
502		vk::VK_FALSE,
503		0u,
504		0u
505	};
506	const vk::VkCommandBufferBeginInfo			beginInfo =
507	{
508		vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
509		DE_NULL,
510		0u,
511		(level == vk::VK_COMMAND_BUFFER_LEVEL_SECONDARY ? &inheritInfo : (const vk::VkCommandBufferInheritanceInfo*)DE_NULL),
512	};
513
514	vk::Move<vk::VkCommandBuffer> commandBuffer (createCommandBuffer(vkd, device, pool, level));
515
516	vkd.beginCommandBuffer(*commandBuffer, &beginInfo);
517
518	return commandBuffer;
519}
520
521vk::Move<vk::VkCommandPool> createCommandPool (const vk::DeviceInterface&	vkd,
522											   vk::VkDevice					device,
523											   deUint32						queueFamilyIndex)
524{
525	const vk::VkCommandPoolCreateInfo poolInfo =
526	{
527		vk::VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
528		DE_NULL,
529
530		vk::VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
531		queueFamilyIndex,
532	};
533
534	return vk::createCommandPool(vkd, device, &poolInfo);
535}
536
537vk::Move<vk::VkBuffer> createBuffer (const vk::DeviceInterface&	vkd,
538									 vk::VkDevice				device,
539									 vk::VkDeviceSize			size,
540									 vk::VkBufferUsageFlags		usage,
541									 vk::VkSharingMode			sharingMode,
542									 const vector<deUint32>&	queueFamilies)
543{
544	const vk::VkBufferCreateInfo	createInfo =
545	{
546		vk::VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
547		DE_NULL,
548
549		0,	// flags
550		size,
551		usage,
552		sharingMode,
553		(deUint32)queueFamilies.size(),
554		&queueFamilies[0]
555	};
556
557	return vk::createBuffer(vkd, device, &createInfo);
558}
559
560vk::Move<vk::VkDeviceMemory> allocMemory (const vk::DeviceInterface&	vkd,
561										  vk::VkDevice					device,
562										  vk::VkDeviceSize				size,
563										  deUint32						memoryTypeIndex)
564{
565	const vk::VkMemoryAllocateInfo alloc =
566	{
567		vk::VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,	// sType
568		DE_NULL,									// pNext
569
570		size,
571		memoryTypeIndex
572	};
573
574	return vk::allocateMemory(vkd, device, &alloc);
575}
576
// Allocate memory compatible with 'buffer' and with all bits of
// 'properties', then bind the buffer to it. Memory types are tried in
// order; out-of-memory errors fall through to the next candidate type.
// Fails the test if no type yields a successful allocation.
vk::Move<vk::VkDeviceMemory> bindBufferMemory (const vk::InstanceInterface&	vki,
											   const vk::DeviceInterface&	vkd,
											   vk::VkPhysicalDevice			physicalDevice,
											   vk::VkDevice					device,
											   vk::VkBuffer					buffer,
											   vk::VkMemoryPropertyFlags	properties)
{
	const vk::VkMemoryRequirements				memoryRequirements	= vk::getBufferMemoryRequirements(vkd, device, buffer);
	const vk::VkPhysicalDeviceMemoryProperties	memoryProperties	= vk::getPhysicalDeviceMemoryProperties(vki, physicalDevice);
	deUint32									memoryTypeIndex;

	for (memoryTypeIndex = 0; memoryTypeIndex < memoryProperties.memoryTypeCount; memoryTypeIndex++)
	{
		// Type must be allowed for the buffer and have every requested property bit.
		if ((memoryRequirements.memoryTypeBits & (0x1u << memoryTypeIndex))
			&& (memoryProperties.memoryTypes[memoryTypeIndex].propertyFlags & properties) == properties)
		{
			try
			{
				const vk::VkMemoryAllocateInfo	allocationInfo	=
				{
					vk::VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
					DE_NULL,
					memoryRequirements.size,
					memoryTypeIndex
				};
				vk::Move<vk::VkDeviceMemory>	memory			(vk::allocateMemory(vkd, device, &allocationInfo));

				VK_CHECK(vkd.bindBufferMemory(device, buffer, *memory, 0));

				return memory;
			}
			catch (const vk::Error& error)
			{
				if (error.getError() == vk::VK_ERROR_OUT_OF_DEVICE_MEMORY
					|| error.getError() == vk::VK_ERROR_OUT_OF_HOST_MEMORY)
				{
					// Try next memory type/heap if out of memory
				}
				else
				{
					// Throw all other errors forward
					throw;
				}
			}
		}
	}

	TCU_FAIL("Failed to allocate memory for buffer");
}
626
// Image counterpart of bindBufferMemory(): allocate memory compatible with
// 'image' and with all bits of 'properties', then bind the image to it.
// Out-of-memory errors fall through to the next candidate memory type;
// fails the test if no type yields a successful allocation.
vk::Move<vk::VkDeviceMemory> bindImageMemory (const vk::InstanceInterface&	vki,
											   const vk::DeviceInterface&	vkd,
											   vk::VkPhysicalDevice			physicalDevice,
											   vk::VkDevice					device,
											   vk::VkImage					image,
											   vk::VkMemoryPropertyFlags	properties)
{
	const vk::VkMemoryRequirements				memoryRequirements	= vk::getImageMemoryRequirements(vkd, device, image);
	const vk::VkPhysicalDeviceMemoryProperties	memoryProperties	= vk::getPhysicalDeviceMemoryProperties(vki, physicalDevice);
	deUint32									memoryTypeIndex;

	for (memoryTypeIndex = 0; memoryTypeIndex < memoryProperties.memoryTypeCount; memoryTypeIndex++)
	{
		// Type must be allowed for the image and have every requested property bit.
		if ((memoryRequirements.memoryTypeBits & (0x1u << memoryTypeIndex))
			&& (memoryProperties.memoryTypes[memoryTypeIndex].propertyFlags & properties) == properties)
		{
			try
			{
				const vk::VkMemoryAllocateInfo	allocationInfo	=
				{
					vk::VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
					DE_NULL,
					memoryRequirements.size,
					memoryTypeIndex
				};
				vk::Move<vk::VkDeviceMemory>	memory			(vk::allocateMemory(vkd, device, &allocationInfo));

				VK_CHECK(vkd.bindImageMemory(device, image, *memory, 0));

				return memory;
			}
			catch (const vk::Error& error)
			{
				if (error.getError() == vk::VK_ERROR_OUT_OF_DEVICE_MEMORY
					|| error.getError() == vk::VK_ERROR_OUT_OF_HOST_MEMORY)
				{
					// Try next memory type/heap if out of memory
				}
				else
				{
					// Throw all other errors forward
					throw;
				}
			}
		}
	}

	TCU_FAIL("Failed to allocate memory for image");
}
676
677void queueRun (const vk::DeviceInterface&	vkd,
678			   vk::VkQueue					queue,
679			   vk::VkCommandBuffer			commandBuffer)
680{
681	const vk::VkSubmitInfo	submitInfo	=
682	{
683		vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,
684		DE_NULL,
685
686		0,
687		DE_NULL,
688		(const vk::VkPipelineStageFlags*)DE_NULL,
689
690		1,
691		&commandBuffer,
692
693		0,
694		DE_NULL
695	};
696
697	VK_CHECK(vkd.queueSubmit(queue, 1, &submitInfo, 0));
698	VK_CHECK(vkd.queueWaitIdle(queue));
699}
700
701void* mapMemory (const vk::DeviceInterface&	vkd,
702				 vk::VkDevice				device,
703				 vk::VkDeviceMemory			memory,
704				 vk::VkDeviceSize			size)
705{
706	void* ptr;
707
708	VK_CHECK(vkd.mapMemory(device, memory, 0, size, 0, &ptr));
709
710	return ptr;
711}
712
// CPU-side model of the device memory contents. Tracks, for every byte,
// both the expected value and whether the value is currently known
// ("defined").
class ReferenceMemory
{
public:
			ReferenceMemory	(size_t size);

	// Set one byte and mark it defined.
	void	set				(size_t pos, deUint8 val);
	// Read one byte; asserts that the byte is defined.
	deUint8	get				(size_t pos) const;
	// True if the byte at 'pos' has a known value.
	bool	isDefined		(size_t pos) const;

	// NOTE(review): no definition for setDefined() is visible in this chunk
	// — confirm it is defined (or used) elsewhere in the file.
	void	setDefined		(size_t offset, size_t size, const void* data);
	void	setUndefined	(size_t offset, size_t size);
	// Copy 'size' bytes from 'data' and mark the range defined.
	void	setData			(size_t offset, size_t size, const void* data);

	size_t	getSize			(void) const { return m_data.size(); }

private:
	vector<deUint8>		m_data;		// Expected byte values
	vector<deUint64>	m_defined;	// One "defined" bit per byte of m_data
};
732
// All bytes start as zero and undefined. The defined-bitmask stores one bit
// per byte; the word count is rounded up so a partial last word fits.
ReferenceMemory::ReferenceMemory (size_t size)
	: m_data	(size, 0)
	, m_defined	(size / 64 + (size % 64 == 0 ? 0 : 1), 0ull)
{
}
738
739void ReferenceMemory::set (size_t pos, deUint8 val)
740{
741	m_data[pos] = val;
742	m_defined[pos / 64] |= 0x1ull << (pos % 64);
743}
744
745void ReferenceMemory::setData (size_t offset, size_t size, const void* data_)
746{
747	const deUint8* data = (const deUint8*)data_;
748
749	// \todo [2016-03-09 mika] Optimize
750	for (size_t pos = 0; pos < size; pos++)
751	{
752		m_data[offset + pos] = data[pos];
753		m_defined[(offset + pos) / 64] |= 0x1ull << ((offset + pos) % 64);
754	}
755}
756
757void ReferenceMemory::setUndefined	(size_t offset, size_t size)
758{
759	// \todo [2016-03-09 mika] Optimize
760	for (size_t pos = 0; pos < size; pos++)
761		m_defined[(offset + pos) / 64] |= 0x1ull << ((offset + pos) % 64);
762}
763
764deUint8 ReferenceMemory::get (size_t pos) const
765{
766	DE_ASSERT(isDefined(pos));
767	return m_data[pos];
768}
769
770bool ReferenceMemory::isDefined (size_t pos) const
771{
772	return (m_defined[pos / 64] & (0x1ull << (pos % 64))) != 0;
773}
774
// Owns one device memory allocation and caches the limits (max buffer size,
// max RGBA8 image dimensions) that resources bound to it may have.
class Memory
{
public:
							Memory				(const vk::InstanceInterface&	vki,
												 const vk::DeviceInterface&		vkd,
												 vk::VkPhysicalDevice			physicalDevice,
												 vk::VkDevice					device,
												 vk::VkDeviceSize				size,
												 deUint32						memoryTypeIndex,
												 vk::VkDeviceSize				maxBufferSize,
												 deInt32						maxImageWidth,
												 deInt32						maxImageHeight);

	vk::VkDeviceSize		getSize				(void) const { return m_size; }
	vk::VkDeviceSize		getMaxBufferSize	(void) const { return m_maxBufferSize; }
	// Buffers are usable with this memory only if a non-zero size fits.
	bool					getSupportBuffers	(void) const { return m_maxBufferSize > 0; }

	deInt32					getMaxImageWidth	(void) const { return m_maxImageWidth; }
	deInt32					getMaxImageHeight	(void) const { return m_maxImageHeight; }
	// Images are usable with this memory only if a non-zero width fits.
	bool					getSupportImages	(void) const { return m_maxImageWidth > 0; }

	const vk::VkMemoryType&	getMemoryType		(void) const { return m_memoryType; }
	deUint32				getMemoryTypeIndex	(void) const { return m_memoryTypeIndex; }
	vk::VkDeviceMemory		getMemory			(void) const { return *m_memory; }

private:
	const vk::VkDeviceSize					m_size;
	const deUint32							m_memoryTypeIndex;
	const vk::VkMemoryType					m_memoryType;
	const vk::Unique<vk::VkDeviceMemory>	m_memory;	// Freed automatically on destruction
	const vk::VkDeviceSize					m_maxBufferSize;
	const deInt32							m_maxImageWidth;
	const deInt32							m_maxImageHeight;
};
809
810vk::VkMemoryType getMemoryTypeInfo (const vk::InstanceInterface&	vki,
811									vk::VkPhysicalDevice			device,
812									deUint32						memoryTypeIndex)
813{
814	const vk::VkPhysicalDeviceMemoryProperties memoryProperties = vk::getPhysicalDeviceMemoryProperties(vki, device);
815
816	DE_ASSERT(memoryTypeIndex < memoryProperties.memoryTypeCount);
817
818	return memoryProperties.memoryTypes[memoryTypeIndex];
819}
820
// Find the largest buffer size whose memory requirements fit into
// 'memorySize' bytes of the given memory type. Starts from memorySize and,
// if that does not fit exactly, runs a halving-step up/down search.
vk::VkDeviceSize findMaxBufferSize (const vk::DeviceInterface&		vkd,
									vk::VkDevice					device,

									vk::VkBufferUsageFlags			usage,
									vk::VkSharingMode				sharingMode,
									const vector<deUint32>&			queueFamilies,

									vk::VkDeviceSize				memorySize,
									deUint32						memoryTypeIndex)
{
	vk::VkDeviceSize lastSuccess = 0;
	vk::VkDeviceSize currentSize = memorySize / 2;

	{
		// Fast path: the full memory size may fit without any padding.
		const vk::Unique<vk::VkBuffer>  buffer			(createBuffer(vkd, device, memorySize, usage, sharingMode, queueFamilies));
		const vk::VkMemoryRequirements  requirements	(vk::getBufferMemoryRequirements(vkd, device, *buffer));

		if (requirements.size == memorySize && requirements.memoryTypeBits & (0x1u << memoryTypeIndex))
			return memorySize;
	}

	// Step size halves each round; grow on success, shrink on failure.
	for (vk::VkDeviceSize stepSize = memorySize / 4; currentSize > 0; stepSize /= 2)
	{
		const vk::Unique<vk::VkBuffer>	buffer			(createBuffer(vkd, device, currentSize, usage, sharingMode, queueFamilies));
		const vk::VkMemoryRequirements	requirements	(vk::getBufferMemoryRequirements(vkd, device, *buffer));

		if (requirements.size <= memorySize && requirements.memoryTypeBits & (0x1u << memoryTypeIndex))
		{
			lastSuccess = currentSize;
			currentSize += stepSize;
		}
		else
			currentSize -= stepSize;

		// stepSize reached zero after the final probe; stop searching.
		if (stepSize == 0)
			break;
	}

	return lastSuccess;
}
861
862// Round size down maximum W * H * 4, where W and H < 4096
863vk::VkDeviceSize roundBufferSizeToWxHx4 (vk::VkDeviceSize size)
864{
865	const vk::VkDeviceSize	maxTextureSize	= 4096;
866	vk::VkDeviceSize		maxTexelCount	= size / 4;
867	vk::VkDeviceSize		bestW			= de::max(maxTexelCount, maxTextureSize);
868	vk::VkDeviceSize		bestH			= maxTexelCount / bestW;
869
870	// \todo [2016-03-09 mika] Could probably be faster?
871	for (vk::VkDeviceSize w = 1; w * w < maxTexelCount && w < maxTextureSize && bestW * bestH * 4 < size; w++)
872	{
873		const vk::VkDeviceSize h = maxTexelCount / w;
874
875		if (bestW * bestH < w * h)
876		{
877			bestW = w;
878			bestH = h;
879		}
880	}
881
882	return bestW * bestH * 4;
883}
884
885// Find RGBA8 image size that has exactly "size" of number of bytes.
886// "size" must be W * H * 4 where W and H < 4096
887IVec2 findImageSizeWxHx4 (vk::VkDeviceSize size)
888{
889	const vk::VkDeviceSize	maxTextureSize	= 4096;
890	vk::VkDeviceSize		texelCount		= size / 4;
891
892	DE_ASSERT((size % 4) == 0);
893
894	// \todo [2016-03-09 mika] Could probably be faster?
895	for (vk::VkDeviceSize w = 1; w < maxTextureSize && w < texelCount; w++)
896	{
897		const vk::VkDeviceSize	h	= texelCount / w;
898
899		if ((texelCount  % w) == 0 && h < maxTextureSize)
900			return IVec2((int)w, (int)h);
901	}
902
903	DE_FATAL("Invalid size");
904	return IVec2(-1, -1);
905}
906
// Find the largest RGBA8 2D image dimensions whose memory requirements fit
// into 'memorySize' bytes of the given memory type. Starts from a roughly
// square guess and refines with a halving-step up/down search.
// NOTE(review): assumes memorySize >= 4 — for smaller sizes 'width' would
// be 0 and 'texelCount / width' would divide by zero; confirm callers
// guarantee this.
IVec2 findMaxRGBA8ImageSize (const vk::DeviceInterface&	vkd,
							 vk::VkDevice				device,

							 vk::VkImageUsageFlags		usage,
							 vk::VkSharingMode			sharingMode,
							 const vector<deUint32>&	queueFamilies,

							 vk::VkDeviceSize			memorySize,
							 deUint32					memoryTypeIndex)
{
	IVec2		lastSuccess		(0);
	IVec2		currentSize;

	{
		// Initial guess: the most square WxH covering all texels.
		const deUint32	texelCount	= (deUint32)(memorySize / 4);
		const deUint32	width		= (deUint32)deFloatSqrt((float)texelCount);
		const deUint32	height		= texelCount / width;

		currentSize[0] = deMaxu32(width, height);
		currentSize[1] = deMinu32(width, height);
	}

	// Step size halves each round; grow both dimensions on success, shrink
	// on failure.
	for (deInt32 stepSize = currentSize[0] / 2; currentSize[0] > 0; stepSize /= 2)
	{
		const vk::VkImageCreateInfo	createInfo		=
		{
			vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
			DE_NULL,

			0u,
			vk::VK_IMAGE_TYPE_2D,
			vk::VK_FORMAT_R8G8B8A8_UNORM,
			{
				(deUint32)currentSize[0],
				(deUint32)currentSize[1],
				1u,
			},
			1u, 1u,
			vk::VK_SAMPLE_COUNT_1_BIT,
			vk::VK_IMAGE_TILING_OPTIMAL,
			usage,
			sharingMode,
			(deUint32)queueFamilies.size(),
			&queueFamilies[0],
			vk::VK_IMAGE_LAYOUT_UNDEFINED
		};
		const vk::Unique<vk::VkImage>	image			(vk::createImage(vkd, device, &createInfo));
		const vk::VkMemoryRequirements	requirements	(vk::getImageMemoryRequirements(vkd, device, *image));

		if (requirements.size <= memorySize && requirements.memoryTypeBits & (0x1u << memoryTypeIndex))
		{
			lastSuccess = currentSize;
			currentSize[0] += stepSize;
			currentSize[1] += stepSize;
		}
		else
		{
			currentSize[0] -= stepSize;
			currentSize[1] -= stepSize;
		}

		// stepSize reached zero after the final probe; stop searching.
		if (stepSize == 0)
			break;
	}

	return lastSuccess;
}
974
// Allocates 'size' bytes from 'memoryTypeIndex' (freed by m_memory's
// destructor) and records the precomputed buffer/image limits. Initializer
// order matches the member declaration order in the class.
Memory::Memory (const vk::InstanceInterface&	vki,
				const vk::DeviceInterface&		vkd,
				vk::VkPhysicalDevice			physicalDevice,
				vk::VkDevice					device,
				vk::VkDeviceSize				size,
				deUint32						memoryTypeIndex,
				vk::VkDeviceSize				maxBufferSize,
				deInt32							maxImageWidth,
				deInt32							maxImageHeight)
	: m_size			(size)
	, m_memoryTypeIndex	(memoryTypeIndex)
	, m_memoryType		(getMemoryTypeInfo(vki, physicalDevice, memoryTypeIndex))
	, m_memory			(allocMemory(vkd, device, size, memoryTypeIndex))
	, m_maxBufferSize	(maxBufferSize)
	, m_maxImageWidth	(maxImageWidth)
	, m_maxImageHeight	(maxImageHeight)
{
}
993
994class Context
995{
996public:
997												Context					(const vk::InstanceInterface&						vki,
998																		 const vk::DeviceInterface&							vkd,
999																		 vk::VkPhysicalDevice								physicalDevice,
1000																		 vk::VkDevice										device,
1001																		 vk::VkQueue										queue,
1002																		 deUint32											queueFamilyIndex,
1003																		 const vector<pair<deUint32, vk::VkQueue> >&		queues,
1004																		 const vk::ProgramCollection<vk::ProgramBinary>&	binaryCollection)
1005		: m_vki					(vki)
1006		, m_vkd					(vkd)
1007		, m_physicalDevice		(physicalDevice)
1008		, m_device				(device)
1009		, m_queue				(queue)
1010		, m_queueFamilyIndex	(queueFamilyIndex)
1011		, m_queues				(queues)
1012		, m_commandPool			(createCommandPool(vkd, device, queueFamilyIndex))
1013		, m_binaryCollection	(binaryCollection)
1014	{
1015		for (size_t queueNdx = 0; queueNdx < m_queues.size(); queueNdx++)
1016			m_queueFamilies.push_back(m_queues[queueNdx].first);
1017	}
1018
1019	const vk::InstanceInterface&					getInstanceInterface	(void) const { return m_vki; }
1020	vk::VkPhysicalDevice							getPhysicalDevice		(void) const { return m_physicalDevice; }
1021	vk::VkDevice									getDevice				(void) const { return m_device; }
1022	const vk::DeviceInterface&						getDeviceInterface		(void) const { return m_vkd; }
1023	vk::VkQueue										getQueue				(void) const { return m_queue; }
1024	deUint32										getQueueFamily			(void) const { return m_queueFamilyIndex; }
1025	const vector<pair<deUint32, vk::VkQueue> >&		getQueues				(void) const { return m_queues; }
1026	const vector<deUint32>							getQueueFamilies		(void) const { return m_queueFamilies; }
1027	vk::VkCommandPool								getCommandPool			(void) const { return *m_commandPool; }
1028	const vk::ProgramCollection<vk::ProgramBinary>&	getBinaryCollection		(void) const { return m_binaryCollection; }
1029
1030private:
1031	const vk::InstanceInterface&					m_vki;
1032	const vk::DeviceInterface&						m_vkd;
1033	const vk::VkPhysicalDevice						m_physicalDevice;
1034	const vk::VkDevice								m_device;
1035	const vk::VkQueue								m_queue;
1036	const deUint32									m_queueFamilyIndex;
1037	const vector<pair<deUint32, vk::VkQueue> >&		m_queues;
1038	const vk::Unique<vk::VkCommandPool>				m_commandPool;
1039	const vk::ProgramCollection<vk::ProgramBinary>&	m_binaryCollection;
1040	vector<deUint32>								m_queueFamilies;
1041};
1042
// Context used during the prepare phase. Owns at most one "current" resource
// (either a buffer or an image, never both) that commands create, bind and
// eventually destroy; ownership can be handed off via releaseBuffer/-Image.
class PrepareContext
{
public:
							PrepareContext	(const Context&	context,
											 const Memory&	memory)
		: m_context	(context)
		, m_memory	(memory)
	{
	}

	const Memory&									getMemory				(void) const { return m_memory; }
	const Context&									getContext				(void) const { return m_context; }
	const vk::ProgramCollection<vk::ProgramBinary>&	getBinaryCollection		(void) const { return m_context.getBinaryCollection(); }

	// Take ownership of a freshly created buffer. Only valid when no buffer
	// or image is currently set.
	void					setBuffer		(vk::Move<vk::VkBuffer>	buffer,
											 vk::VkDeviceSize		size)
	{
		DE_ASSERT(!m_currentImage);
		DE_ASSERT(!m_currentBuffer);

		m_currentBuffer		= buffer;
		m_currentBufferSize	= size;
	}

	vk::VkBuffer			getBuffer		(void) const { return *m_currentBuffer; }
	vk::VkDeviceSize		getBufferSize	(void) const
	{
		DE_ASSERT(m_currentBuffer);
		return m_currentBufferSize;
	}

	// Give up ownership of the buffer handle without destroying it; the caller
	// (e.g. DestroyBuffer) becomes responsible for destruction.
	void					releaseBuffer	(void) { m_currentBuffer.disown(); }

	// Take ownership of a freshly created image along with its layout, memory
	// requirement size and dimensions. Only valid when nothing is set.
	void					setImage		(vk::Move<vk::VkImage>	image,
											 vk::VkImageLayout		layout,
											 vk::VkDeviceSize		memorySize,
											 deInt32				width,
											 deInt32				height)
	{
		DE_ASSERT(!m_currentImage);
		DE_ASSERT(!m_currentBuffer);

		m_currentImage				= image;
		m_currentImageMemorySize	= memorySize;
		m_currentImageLayout		= layout;
		m_currentImageWidth			= width;
		m_currentImageHeight		= height;
	}

	// Record a layout transition performed on the current image.
	void				setImageLayout	(vk::VkImageLayout layout)
	{
		DE_ASSERT(m_currentImage);
		m_currentImageLayout = layout;
	}

	vk::VkImage			getImage		(void) const { return *m_currentImage; }
	deInt32				getImageWidth	(void) const
	{
		DE_ASSERT(m_currentImage);
		return m_currentImageWidth;
	}
	deInt32				getImageHeight	(void) const
	{
		DE_ASSERT(m_currentImage);
		return m_currentImageHeight;
	}
	vk::VkDeviceSize	getImageMemorySize	(void) const
	{
		DE_ASSERT(m_currentImage);
		return m_currentImageMemorySize;
	}

	// Give up ownership of the image handle without destroying it.
	void					releaseImage	(void) { m_currentImage.disown(); }

	vk::VkImageLayout		getImageLayout	(void) const
	{
		DE_ASSERT(m_currentImage);
		return m_currentImageLayout;
	}

private:
	const Context&			m_context;
	const Memory&			m_memory;

	vk::Move<vk::VkBuffer>	m_currentBuffer;
	vk::VkDeviceSize		m_currentBufferSize;

	vk::Move<vk::VkImage>	m_currentImage;
	vk::VkDeviceSize		m_currentImageMemorySize;
	vk::VkImageLayout		m_currentImageLayout;
	deInt32					m_currentImageWidth;
	deInt32					m_currentImageHeight;
};
1136
1137class ExecuteContext
1138{
1139public:
1140					ExecuteContext	(const Context&	context)
1141		: m_context	(context)
1142	{
1143	}
1144
1145	const Context&	getContext		(void) const { return m_context; }
1146	void			setMapping		(void* ptr) { m_mapping = ptr; }
1147	void*			getMapping		(void) const { return m_mapping; }
1148
1149private:
1150	const Context&	m_context;
1151	void*			m_mapping;
1152};
1153
// Context used during the verify phase; holds the CPU-side reference memory
// and reference image that commands compare results against, plus the log and
// result collector used to report mismatches.
class VerifyContext
{
public:
							VerifyContext		(TestLog&				log,
												 tcu::ResultCollector&	resultCollector,
												 const Context&			context,
												 vk::VkDeviceSize		size)
		: m_log				(log)
		, m_resultCollector	(resultCollector)
		, m_context			(context)
		, m_reference		((size_t)size)
	{
	}

	const Context&			getContext			(void) const { return m_context; }
	TestLog&				getLog				(void) const { return m_log; }
	tcu::ResultCollector&	getResultCollector	(void) const { return m_resultCollector; }

	ReferenceMemory&		getReference		(void) { return m_reference; }
	TextureLevel&			getReferenceImage	(void) { return m_referenceImage;}

private:
	TestLog&				m_log;
	tcu::ResultCollector&	m_resultCollector;
	const Context&			m_context;
	ReferenceMemory			m_reference;
	TextureLevel			m_referenceImage;
};
1182
1183class Command
1184{
1185public:
1186	// Constructor should allocate all non-vulkan resources.
1187	virtual				~Command	(void) {}
1188
1189	// Get name of the command
1190	virtual const char*	getName		(void) const = 0;
1191
1192	// Log prepare operations
1193	virtual void		logPrepare	(TestLog&, size_t) const {}
1194	// Log executed operations
1195	virtual void		logExecute	(TestLog&, size_t) const {}
1196
1197	// Prepare should allocate all vulkan resources and resources that require
1198	// that buffer or memory has been already allocated. This should build all
1199	// command buffers etc.
1200	virtual void		prepare		(PrepareContext&) {}
1201
1202	// Execute command. Write or read mapped memory, submit commands to queue
1203	// etc.
1204	virtual void		execute		(ExecuteContext&) {}
1205
1206	// Verify that results are correct.
1207	virtual void		verify		(VerifyContext&, size_t) {}
1208
1209protected:
1210	// Allow only inheritance
1211						Command		(void) {}
1212
1213private:
1214	// Disallow copying
1215						Command		(const Command&);
1216	Command&			operator&	(const Command&);
1217};
1218
// Map the whole memory object into host address space and publish the pointer
// through the execute context.
class Map : public Command
{
public:
						Map			(void) {}
						~Map		(void) {}
	const char*			getName		(void) const { return "Map"; }


	void				logExecute	(TestLog& log, size_t commandIndex) const
	{
		log << TestLog::Message << commandIndex << ":" << getName() << " Map memory" << TestLog::EndMessage;
	}

	void				prepare		(PrepareContext& context)
	{
		// Cache handle and size; the Memory object is not available at
		// execute time.
		m_memory	= context.getMemory().getMemory();
		m_size		= context.getMemory().getSize();
	}

	void				execute		(ExecuteContext& context)
	{
		const vk::DeviceInterface&	vkd		= context.getContext().getDeviceInterface();
		const vk::VkDevice			device	= context.getContext().getDevice();

		context.setMapping(mapMemory(vkd, device, m_memory, m_size));
	}

private:
	vk::VkDeviceMemory	m_memory;
	vk::VkDeviceSize	m_size;
};
1250
// Unmap the memory object and clear the mapping pointer in the execute
// context.
class UnMap : public Command
{
public:
						UnMap		(void) {}
						~UnMap		(void) {}
	const char*			getName		(void) const { return "UnMap"; }

	void				logExecute	(TestLog& log, size_t commandIndex) const
	{
		log << TestLog::Message << commandIndex << ": Unmap memory" << TestLog::EndMessage;
	}

	void				prepare		(PrepareContext& context)
	{
		m_memory	= context.getMemory().getMemory();
	}

	void				execute		(ExecuteContext& context)
	{
		const vk::DeviceInterface&	vkd		= context.getContext().getDeviceInterface();
		const vk::VkDevice			device	= context.getContext().getDevice();

		vkd.unmapMemory(device, m_memory);
		context.setMapping(DE_NULL);
	}

private:
	vk::VkDeviceMemory	m_memory;
};
1280
// Invalidate the whole mapped memory range so device writes become visible to
// the host.
class Invalidate : public Command
{
public:
						Invalidate	(void) {}
						~Invalidate	(void) {}
	const char*			getName		(void) const { return "Invalidate"; }

	void				logExecute	(TestLog& log, size_t commandIndex) const
	{
		log << TestLog::Message << commandIndex << ": Invalidate mapped memory" << TestLog::EndMessage;
	}

	void				prepare		(PrepareContext& context)
	{
		m_memory	= context.getMemory().getMemory();
		m_size		= context.getMemory().getSize();
	}

	void				execute		(ExecuteContext& context)
	{
		const vk::DeviceInterface&	vkd		= context.getContext().getDeviceInterface();
		const vk::VkDevice			device	= context.getContext().getDevice();

		vk::invalidateMappedMemoryRange(vkd, device, m_memory, 0, m_size);
	}

private:
	vk::VkDeviceMemory	m_memory;
	vk::VkDeviceSize	m_size;
};
1311
// Flush the whole mapped memory range so host writes become visible to the
// device.
class Flush : public Command
{
public:
						Flush		(void) {}
						~Flush		(void) {}
	const char*			getName		(void) const { return "Flush"; }

	void				logExecute	(TestLog& log, size_t commandIndex) const
	{
		log << TestLog::Message << commandIndex << ": Flush mapped memory" << TestLog::EndMessage;
	}

	void				prepare		(PrepareContext& context)
	{
		m_memory	= context.getMemory().getMemory();
		m_size		= context.getMemory().getSize();
	}

	void				execute		(ExecuteContext& context)
	{
		const vk::DeviceInterface&	vkd		= context.getContext().getDeviceInterface();
		const vk::VkDevice			device	= context.getContext().getDevice();

		vk::flushMappedMemoryRange(vkd, device, m_memory, 0, m_size);
	}

private:
	vk::VkDeviceMemory	m_memory;
	vk::VkDeviceSize	m_size;
};
1342
1343// Host memory reads and writes
// Host memory reads and writes
// Reads and/or writes the whole mapped memory through the host pointer.
// Writes are pseudo-random bytes derived from the seed; read+write XORs each
// byte with a seed-derived mask so verify() can replay the same sequence.
class HostMemoryAccess : public Command
{
public:
					HostMemoryAccess	(bool read, bool write, deUint32 seed);
					~HostMemoryAccess	(void) {}
	const char*		getName				(void) const { return "HostMemoryAccess"; }

	void			logExecute			(TestLog& log, size_t commandIndex) const;
	void			prepare				(PrepareContext& context);
	void			execute				(ExecuteContext& context);

	void			verify				(VerifyContext& context, size_t commandIndex);

private:
	const bool		m_read;
	const bool		m_write;
	const deUint32	m_seed;

	size_t			m_size;
	// Bytes read back during execute(); compared against the reference in
	// verify().
	vector<deUint8>	m_readData;
};
1365
// At least one of 'read' and 'write' must be true; 'seed' drives the RNG used
// identically by execute() and verify().
HostMemoryAccess::HostMemoryAccess (bool read, bool write, deUint32 seed)
	: m_read	(read)
	, m_write	(write)
	, m_seed	(seed)
{
}
1372
// Log which access modes and seed were used for this command.
void HostMemoryAccess::logExecute (TestLog& log, size_t commandIndex) const
{
	log << TestLog::Message << commandIndex << ": Host memory access:" << (m_read ? " read" : "") << (m_write ? " write" : "")  << ", seed: " << m_seed << TestLog::EndMessage;
}
1377
// Cache the memory size and, for reads, pre-allocate the read-back buffer so
// execute() does no allocation.
void HostMemoryAccess::prepare (PrepareContext& context)
{
	m_size = (size_t)context.getMemory().getSize();

	if (m_read)
		m_readData.resize(m_size, 0);
}
1385
// Perform the host access through the current mapping. The RNG draw order
// (one byte per position) must match verify() exactly so the reference can
// reproduce the written values.
void HostMemoryAccess::execute (ExecuteContext& context)
{
	de::Random		rng	(m_seed);
	deUint8* const	ptr	= (deUint8*)context.getMapping();

	if (m_read && m_write)
	{
		// Read each byte, remember it, and write back the byte XORed with a
		// seed-derived mask.
		for (size_t pos = 0; pos < m_size; pos++)
		{
			const deUint8	mask	= rng.getUint8();
			const deUint8	value	= ptr[pos];

			m_readData[pos] = value;
			ptr[pos] = value ^ mask;
		}
	}
	else if (m_read)
	{
		// Pure read: capture current memory contents for later verification.
		for (size_t pos = 0; pos < m_size; pos++)
		{
			const deUint8	value	= ptr[pos];

			m_readData[pos] = value;
		}
	}
	else if (m_write)
	{
		// Pure write: fill memory with seed-derived pseudo-random bytes.
		for (size_t pos = 0; pos < m_size; pos++)
		{
			const deUint8	value	= rng.getUint8();

			ptr[pos] = value;
		}
	}
	else
		DE_FATAL("Host memory access without read or write.");
}
1423
// Check the bytes read in execute() against the reference memory and replay
// the writes into the reference. Uses the same seed and per-byte RNG draw
// order as execute().
void HostMemoryAccess::verify (VerifyContext& context, size_t commandIndex)
{
	tcu::ResultCollector&	resultCollector	= context.getResultCollector();
	ReferenceMemory&		reference		= context.getReference();
	de::Random				rng				(m_seed);

	if (m_read && m_write)
	{
		for (size_t pos = 0; pos < m_size; pos++)
		{
			// Draw the mask unconditionally to stay in lock-step with
			// execute() even for undefined bytes.
			const deUint8	mask	= rng.getUint8();
			const deUint8	value	= m_readData[pos];

			// Only bytes with defined reference contents can be checked.
			if (reference.isDefined(pos))
			{
				if (value != reference.get(pos))
				{
					resultCollector.fail(
							de::toString(commandIndex) + ":" + getName()
							+ " Result differs from reference, Expected: "
							+ de::toString(tcu::toHex<8>(reference.get(pos)))
							+ ", Got: "
							+ de::toString(tcu::toHex<8>(value))
							+ ", At offset: "
							+ de::toString(pos));
					break;
				}

				// Mirror the XOR write performed by execute().
				reference.set(pos, reference.get(pos) ^ mask);
			}
		}
	}
	else if (m_read)
	{
		for (size_t pos = 0; pos < m_size; pos++)
		{
			const deUint8	value	= m_readData[pos];

			if (reference.isDefined(pos))
			{
				if (value != reference.get(pos))
				{
					resultCollector.fail(
							de::toString(commandIndex) + ":" + getName()
							+ " Result differs from reference, Expected: "
							+ de::toString(tcu::toHex<8>(reference.get(pos)))
							+ ", Got: "
							+ de::toString(tcu::toHex<8>(value))
							+ ", At offset: "
							+ de::toString(pos));
					break;
				}
			}
		}
	}
	else if (m_write)
	{
		// Pure write: the whole reference becomes defined with the same
		// pseudo-random bytes execute() wrote.
		for (size_t pos = 0; pos < m_size; pos++)
		{
			const deUint8	value	= rng.getUint8();

			reference.set(pos, value);
		}
	}
	else
		DE_FATAL("Host memory access without read or write.");
}
1491
// Create a buffer of the maximum supported size with the given usage and
// sharing mode, and hand it to the prepare context.
class CreateBuffer : public Command
{
public:
									CreateBuffer	(vk::VkBufferUsageFlags	usage,
													 vk::VkSharingMode		sharing);
									~CreateBuffer	(void) {}
	const char*						getName			(void) const { return "CreateBuffer"; }

	void							logPrepare		(TestLog& log, size_t commandIndex) const;
	void							prepare			(PrepareContext& context);

private:
	const vk::VkBufferUsageFlags	m_usage;
	const vk::VkSharingMode			m_sharing;
};
1507
// Store buffer creation parameters; the buffer itself is created in prepare().
CreateBuffer::CreateBuffer (vk::VkBufferUsageFlags	usage,
							vk::VkSharingMode		sharing)
	: m_usage	(usage)
	, m_sharing	(sharing)
{
}
1514
// Log the buffer creation parameters.
void CreateBuffer::logPrepare (TestLog& log, size_t commandIndex) const
{
	log << TestLog::Message << commandIndex << ":" << getName() << " Create buffer, Sharing mode: " << m_sharing << ", Usage: " << vk::getBufferUsageFlagsStr(m_usage) << TestLog::EndMessage;
}
1519
1520void CreateBuffer::prepare (PrepareContext& context)
1521{
1522	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
1523	const vk::VkDevice			device			= context.getContext().getDevice();
1524	const vk::VkDeviceSize		bufferSize		= context.getMemory().getMaxBufferSize();
1525	const vector<deUint32>&		queueFamilies	= context.getContext().getQueueFamilies();
1526
1527	context.setBuffer(createBuffer(vkd, device, bufferSize, m_usage, m_sharing, queueFamilies), bufferSize);
1528}
1529
// Take ownership of the current buffer in prepare() and destroy it during
// execute().
class DestroyBuffer : public Command
{
public:
							DestroyBuffer	(void);
							~DestroyBuffer	(void) {}
	const char*				getName			(void) const { return "DestroyBuffer"; }

	void					logExecute		(TestLog& log, size_t commandIndex) const;
	void					prepare			(PrepareContext& context);
	void					execute			(ExecuteContext& context);

private:
	vk::Move<vk::VkBuffer>	m_buffer;
};
1544
// Nothing to do until prepare(); the buffer handle is acquired there.
DestroyBuffer::DestroyBuffer (void)
{
}
1548
// Wrap the context's buffer handle in an owning Move<> and make the context
// give up its ownership so the handle is destroyed exactly once.
void DestroyBuffer::prepare (PrepareContext& context)
{
	m_buffer = vk::Move<vk::VkBuffer>(vk::check(context.getBuffer()), vk::Deleter<vk::VkBuffer>(context.getContext().getDeviceInterface(), context.getContext().getDevice(), DE_NULL));
	context.releaseBuffer();
}
1554
// Log the buffer destruction.
void DestroyBuffer::logExecute (TestLog& log, size_t commandIndex) const
{
	log << TestLog::Message << commandIndex << ":" << getName() << " Destroy buffer" << TestLog::EndMessage;
}
1559
1560void DestroyBuffer::execute (ExecuteContext& context)
1561{
1562	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
1563	const vk::VkDevice			device			= context.getContext().getDevice();
1564
1565	vkd.destroyBuffer(device, m_buffer.disown(), DE_NULL);
1566}
1567
// Bind the test's memory allocation to the current buffer at offset zero.
class BindBufferMemory : public Command
{
public:
				BindBufferMemory	(void) {}
				~BindBufferMemory	(void) {}
	const char*	getName				(void) const { return "BindBufferMemory"; }

	void		logPrepare			(TestLog& log, size_t commandIndex) const;
	void		prepare				(PrepareContext& context);
};
1578
// Log the buffer memory bind.
void BindBufferMemory::logPrepare (TestLog& log, size_t commandIndex) const
{
	log << TestLog::Message << commandIndex << ":" << getName() << " Bind memory to buffer" << TestLog::EndMessage;
}
1583
1584void BindBufferMemory::prepare (PrepareContext& context)
1585{
1586	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
1587	const vk::VkDevice			device			= context.getContext().getDevice();
1588
1589	VK_CHECK(vkd.bindBufferMemory(device, context.getBuffer(), context.getMemory().getMemory(), 0));
1590}
1591
// Create a 2D RGBA8 optimal-tiling image of the maximum supported dimensions
// and hand it to the prepare context; also resets the reference image in
// verify().
class CreateImage : public Command
{
public:
									CreateImage		(vk::VkImageUsageFlags	usage,
													 vk::VkSharingMode		sharing);
									~CreateImage	(void) {}
	const char*						getName			(void) const { return "CreateImage"; }

	void							logPrepare		(TestLog& log, size_t commandIndex) const;
	void							prepare			(PrepareContext& context);
	void							verify			(VerifyContext& context, size_t commandIndex);

private:
	const vk::VkImageUsageFlags	m_usage;
	const vk::VkSharingMode		m_sharing;
	// Dimensions are filled in by prepare() from the memory object's limits.
	deInt32						m_imageWidth;
	deInt32						m_imageHeight;
};
1610
1611CreateImage::CreateImage (vk::VkImageUsageFlags	usage,
1612						  vk::VkSharingMode		sharing)
1613	: m_usage	(usage)
1614	, m_sharing	(sharing)
1615{
1616}
1617
// Log the image creation parameters.
void CreateImage::logPrepare (TestLog& log, size_t commandIndex) const
{
	log << TestLog::Message << commandIndex << ":" << getName() << " Create image, sharing: " << m_sharing << ", usage: " << vk::getImageUsageFlagsStr(m_usage)  << TestLog::EndMessage;
}
1622
// Create the image at the maximum dimensions allowed by the memory object and
// transfer its ownership (with layout, memory-requirement size and
// dimensions) to the prepare context.
void CreateImage::prepare (PrepareContext& context)
{
	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
	const vk::VkDevice			device			= context.getContext().getDevice();
	const vector<deUint32>&		queueFamilies	= context.getContext().getQueueFamilies();

	m_imageWidth	= context.getMemory().getMaxImageWidth();
	m_imageHeight	= context.getMemory().getMaxImageHeight();

	{
		// 2D single-mip, single-layer RGBA8 image with optimal tiling,
		// starting in UNDEFINED layout.
		const vk::VkImageCreateInfo	createInfo		=
		{
			vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
			DE_NULL,

			0u,
			vk::VK_IMAGE_TYPE_2D,
			vk::VK_FORMAT_R8G8B8A8_UNORM,
			{
				(deUint32)m_imageWidth,
				(deUint32)m_imageHeight,
				1u,
			},
			1u, 1u,
			vk::VK_SAMPLE_COUNT_1_BIT,
			vk::VK_IMAGE_TILING_OPTIMAL,
			m_usage,
			m_sharing,
			(deUint32)queueFamilies.size(),
			&queueFamilies[0],
			vk::VK_IMAGE_LAYOUT_UNDEFINED
		};
		vk::Move<vk::VkImage>			image			(createImage(vkd, device, &createInfo));
		const vk::VkMemoryRequirements	requirements	= vk::getImageMemoryRequirements(vkd, device, *image);

		context.setImage(image, vk::VK_IMAGE_LAYOUT_UNDEFINED, requirements.size, m_imageWidth, m_imageHeight);
	}
}
1661
// Reset the reference image to match the freshly created image's format and
// dimensions (contents undefined until rendered/filled).
void CreateImage::verify (VerifyContext& context, size_t)
{
	context.getReferenceImage() = TextureLevel(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_imageWidth, m_imageHeight);
}
1666
// Take ownership of the current image in prepare() and destroy it during
// execute().
class DestroyImage : public Command
{
public:
							DestroyImage	(void);
							~DestroyImage	(void) {}
	const char*				getName			(void) const { return "DestroyImage"; }

	void					logExecute		(TestLog& log, size_t commandIndex) const;
	void					prepare			(PrepareContext& context);
	void					execute			(ExecuteContext& context);

private:
	vk::Move<vk::VkImage>	m_image;
};
1681
// Nothing to do until prepare(); the image handle is acquired there.
DestroyImage::DestroyImage (void)
{
}
1685
// Wrap the context's image handle in an owning Move<> and make the context
// give up its ownership so the handle is destroyed exactly once.
void DestroyImage::prepare (PrepareContext& context)
{
	m_image = vk::Move<vk::VkImage>(vk::check(context.getImage()), vk::Deleter<vk::VkImage>(context.getContext().getDeviceInterface(), context.getContext().getDevice(), DE_NULL));
	context.releaseImage();
}
1691
1692
// Log the image destruction.
void DestroyImage::logExecute (TestLog& log, size_t commandIndex) const
{
	log << TestLog::Message << commandIndex << ":" << getName() << " Destroy image" << TestLog::EndMessage;
}
1697
1698void DestroyImage::execute (ExecuteContext& context)
1699{
1700	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
1701	const vk::VkDevice			device			= context.getContext().getDevice();
1702
1703	vkd.destroyImage(device, m_image.disown(), DE_NULL);
1704}
1705
// Bind the test's memory allocation to the current image at offset zero.
class BindImageMemory : public Command
{
public:
				BindImageMemory		(void) {}
				~BindImageMemory	(void) {}
	const char*	getName				(void) const { return "BindImageMemory"; }

	void		logPrepare			(TestLog& log, size_t commandIndex) const;
	void		prepare				(PrepareContext& context);
};
1716
// Log the image memory bind.
void BindImageMemory::logPrepare (TestLog& log, size_t commandIndex) const
{
	log << TestLog::Message << commandIndex << ":" << getName() << " Bind memory to image" << TestLog::EndMessage;
}
1721
1722void BindImageMemory::prepare (PrepareContext& context)
1723{
1724	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
1725	const vk::VkDevice				device			= context.getContext().getDevice();
1726
1727	VK_CHECK(vkd.bindImageMemory(device, context.getImage(), context.getMemory().getMemory(), 0));
1728}
1729
1730class QueueWaitIdle : public Command
1731{
1732public:
1733				QueueWaitIdle	(void) {}
1734				~QueueWaitIdle	(void) {}
1735	const char*	getName			(void) const { return "QueuetWaitIdle"; }
1736
1737	void		logExecute		(TestLog& log, size_t commandIndex) const;
1738	void		execute			(ExecuteContext& context);
1739};
1740
// Log the queue wait.
void QueueWaitIdle::logExecute (TestLog& log, size_t commandIndex) const
{
	log << TestLog::Message << commandIndex << ":" << getName() << " Queue wait idle" << TestLog::EndMessage;
}
1745
1746void QueueWaitIdle::execute (ExecuteContext& context)
1747{
1748	const vk::DeviceInterface&	vkd		= context.getContext().getDeviceInterface();
1749	const vk::VkQueue			queue	= context.getContext().getQueue();
1750
1751	VK_CHECK(vkd.queueWaitIdle(queue));
1752}
1753
// Wait until the whole device is idle during execute().
class DeviceWaitIdle : public Command
{
public:
				DeviceWaitIdle	(void) {}
				~DeviceWaitIdle	(void) {}
	const char*	getName			(void) const { return "DeviceWaitIdle"; }

	void		logExecute		(TestLog& log, size_t commandIndex) const;
	void		execute			(ExecuteContext& context);
};
1764
// Log the device wait.
void DeviceWaitIdle::logExecute (TestLog& log, size_t commandIndex) const
{
	log << TestLog::Message << commandIndex << ":" << getName() << " Device wait idle" << TestLog::EndMessage;
}
1769
1770void DeviceWaitIdle::execute (ExecuteContext& context)
1771{
1772	const vk::DeviceInterface&	vkd		= context.getContext().getDeviceInterface();
1773	const vk::VkDevice			device	= context.getContext().getDevice();
1774
1775	VK_CHECK(vkd.deviceWaitIdle(device));
1776}
1777
// Context passed to CmdCommand::submit(): exposes the command buffer being
// recorded plus the current resources from the prepare context.
class SubmitContext
{
public:
								SubmitContext		(const PrepareContext&		context,
													 const vk::VkCommandBuffer	commandBuffer)
		: m_context			(context)
		, m_commandBuffer	(commandBuffer)
	{
	}

	const Memory&				getMemory			(void) const { return m_context.getMemory(); }
	const Context&				getContext			(void) const { return m_context.getContext(); }
	vk::VkCommandBuffer			getCommandBuffer	(void) const { return m_commandBuffer; }

	vk::VkBuffer				getBuffer			(void) const { return m_context.getBuffer(); }
	vk::VkDeviceSize			getBufferSize		(void) const { return m_context.getBufferSize(); }

	vk::VkImage					getImage			(void) const { return m_context.getImage(); }
	deInt32						getImageWidth		(void) const { return m_context.getImageWidth(); }
	deInt32						getImageHeight		(void) const { return m_context.getImageHeight(); }

private:
	const PrepareContext&		m_context;
	const vk::VkCommandBuffer	m_commandBuffer;
};
1803
// Base class for commands recorded into a command buffer (as opposed to
// Command, which runs on the host); driven by SubmitCommandBuffer.
class CmdCommand
{
public:
	virtual				~CmdCommand	(void) {}
	virtual const char*	getName		(void) const = 0;

	// Log things that are done during prepare
	virtual void		logPrepare	(TestLog&, size_t) const {}
	// Log submitted calls etc.
	virtual void		logSubmit	(TestLog&, size_t) const {}

	// Allocate vulkan resources and prepare for submit.
	virtual void		prepare		(PrepareContext&) {}

	// Submit commands to command buffer.
	virtual void		submit		(SubmitContext&) {}

	// Verify results
	virtual void		verify		(VerifyContext&, size_t) {}
};
1824
// Records a list of CmdCommands into a primary command buffer in prepare()
// and submits that buffer to the queue in execute(). Takes ownership of the
// CmdCommand pointers and deletes them in the destructor.
class SubmitCommandBuffer : public Command
{
public:
					SubmitCommandBuffer		(const vector<CmdCommand*>& commands);
					~SubmitCommandBuffer	(void);

	const char*		getName					(void) const { return "SubmitCommandBuffer"; }
	void			logExecute				(TestLog& log, size_t commandIndex) const;
	void			logPrepare				(TestLog& log, size_t commandIndex) const;

	// Allocate command buffer and submit commands to command buffer
	void			prepare					(PrepareContext& context);
	void			execute					(ExecuteContext& context);

	// Verify that results are correct.
	void			verify					(VerifyContext& context, size_t commandIndex);

private:
	vector<CmdCommand*>				m_commands;
	vk::Move<vk::VkCommandBuffer>	m_commandBuffer;
};
1846
// Takes ownership of the CmdCommand pointers in 'commands'.
SubmitCommandBuffer::SubmitCommandBuffer (const vector<CmdCommand*>& commands)
	: m_commands	(commands)
{
}
1851
// Delete the owned CmdCommand objects.
SubmitCommandBuffer::~SubmitCommandBuffer (void)
{
	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
		delete m_commands[cmdNdx];
}
1857
// Allocate+begin the command buffer, let each child command prepare its
// resources, then record each child command into the buffer and end it.
void SubmitCommandBuffer::prepare (PrepareContext& context)
{
	const vk::DeviceInterface&	vkd			= context.getContext().getDeviceInterface();
	const vk::VkDevice			device		= context.getContext().getDevice();
	const vk::VkCommandPool		commandPool	= context.getContext().getCommandPool();

	m_commandBuffer = createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY);

	// First pass: resource allocation for all child commands.
	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
	{
		CmdCommand& command = *m_commands[cmdNdx];

		command.prepare(context);
	}

	{
		// Second pass: record the child commands into the command buffer.
		SubmitContext submitContext (context, *m_commandBuffer);

		for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
		{
			CmdCommand& command = *m_commands[cmdNdx];

			command.submit(submitContext);
		}

		VK_CHECK(vkd.endCommandBuffer(*m_commandBuffer));
	}
}
1886
1887void SubmitCommandBuffer::execute (ExecuteContext& context)
1888{
1889	const vk::DeviceInterface&	vkd		= context.getContext().getDeviceInterface();
1890	const vk::VkCommandBuffer	cmd		= *m_commandBuffer;
1891	const vk::VkQueue			queue	= context.getContext().getQueue();
1892	const vk::VkSubmitInfo		submit	=
1893	{
1894		vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,
1895		DE_NULL,
1896
1897		0,
1898		DE_NULL,
1899		(const vk::VkPipelineStageFlags*)DE_NULL,
1900
1901		1,
1902		&cmd,
1903
1904		0,
1905		DE_NULL
1906	};
1907
1908	vkd.queueSubmit(queue, 1, &submit, 0);
1909}
1910
// Verify each child command inside its own log section.
void SubmitCommandBuffer::verify (VerifyContext& context, size_t commandIndex)
{
	const string				sectionName	(de::toString(commandIndex) + ":" + getName());
	const tcu::ScopedLogSection	section		(context.getLog(), sectionName, sectionName);

	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
		m_commands[cmdNdx]->verify(context, cmdNdx);
}
1919
// Log each child command's prepare operations inside a log section.
void SubmitCommandBuffer::logPrepare (TestLog& log, size_t commandIndex) const
{
	const string				sectionName	(de::toString(commandIndex) + ":" + getName());
	const tcu::ScopedLogSection	section		(log, sectionName, sectionName);

	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
		m_commands[cmdNdx]->logPrepare(log, cmdNdx);
}
1928
// Log each child command's submitted calls inside a log section.
void SubmitCommandBuffer::logExecute (TestLog& log, size_t commandIndex) const
{
	const string				sectionName	(de::toString(commandIndex) + ":" + getName());
	const tcu::ScopedLogSection	section		(log, sectionName, sectionName);

	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
		m_commands[cmdNdx]->logSubmit(log, cmdNdx);
}
1937
// Records a vkCmdPipelineBarrier with a global memory barrier, a buffer
// barrier covering the whole current buffer, or an image barrier covering the
// whole current image (layout required for the image case).
class PipelineBarrier : public CmdCommand
{
public:
	enum Type
	{
		TYPE_GLOBAL = 0,
		TYPE_BUFFER,
		TYPE_IMAGE,
		TYPE_LAST
	};
									PipelineBarrier		(const vk::VkPipelineStageFlags			srcStages,
														 const vk::VkAccessFlags				srcAccesses,
														 const vk::VkPipelineStageFlags			dstStages,
														 const vk::VkAccessFlags				dstAccesses,
														 Type									type,
														 const tcu::Maybe<vk::VkImageLayout>	imageLayout);
									~PipelineBarrier	(void) {}
	const char*						getName				(void) const { return "PipelineBarrier"; }

	void							logSubmit			(TestLog& log, size_t commandIndex) const;
	void							submit				(SubmitContext& context);

private:
	const vk::VkPipelineStageFlags		m_srcStages;
	const vk::VkAccessFlags				m_srcAccesses;
	const vk::VkPipelineStageFlags		m_dstStages;
	const vk::VkAccessFlags				m_dstAccesses;
	const Type							m_type;
	// Only set for TYPE_IMAGE; used as both oldLayout and newLayout
	// (no transition).
	const tcu::Maybe<vk::VkImageLayout>	m_imageLayout;
};
1968
// 'imageLayout' must be set when type is TYPE_IMAGE; it is ignored otherwise.
PipelineBarrier::PipelineBarrier (const vk::VkPipelineStageFlags		srcStages,
								  const vk::VkAccessFlags				srcAccesses,
								  const vk::VkPipelineStageFlags		dstStages,
								  const vk::VkAccessFlags				dstAccesses,
								  Type									type,
								  const tcu::Maybe<vk::VkImageLayout>	imageLayout)
	: m_srcStages	(srcStages)
	, m_srcAccesses	(srcAccesses)
	, m_dstStages	(dstStages)
	, m_dstAccesses	(dstAccesses)
	, m_type		(type)
	, m_imageLayout	(imageLayout)
{
}
1983
// Log the barrier type and its stage/access masks.
void PipelineBarrier::logSubmit (TestLog& log, size_t commandIndex) const
{
	log << TestLog::Message << commandIndex << ":" << getName()
		<< " " << (m_type == TYPE_GLOBAL ? "Global pipeline barrier"
					: m_type == TYPE_BUFFER ? "Buffer pipeline barrier"
					: "Image pipeline barrier")
		<< ", srcStages: " << vk::getPipelineStageFlagsStr(m_srcStages) << ", srcAccesses: " << vk::getAccessFlagsStr(m_srcAccesses)
		<< ", dstStages: " << vk::getPipelineStageFlagsStr(m_dstStages) << ", dstAccesses: " << vk::getAccessFlagsStr(m_dstAccesses) << TestLog::EndMessage;
}
1993
// Record the pipeline barrier into the command buffer. Exactly one barrier
// struct is passed depending on m_type; the other barrier arrays are empty.
void PipelineBarrier::submit (SubmitContext& context)
{
	const vk::DeviceInterface&	vkd	= context.getContext().getDeviceInterface();
	const vk::VkCommandBuffer	cmd	= context.getCommandBuffer();

	// \todo [2016-01-08 pyry] This could be cleaned up thanks to latest API changes

	switch (m_type)
	{
		case TYPE_GLOBAL:
		{
			const vk::VkMemoryBarrier	barrier		=
			{
				vk::VK_STRUCTURE_TYPE_MEMORY_BARRIER,
				DE_NULL,

				m_srcAccesses,
				m_dstAccesses
			};

			vkd.cmdPipelineBarrier(cmd, m_srcStages, m_dstStages, (vk::VkDependencyFlags)0, 1, &barrier, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
			break;
		}

		case TYPE_BUFFER:
		{
			// Covers the whole buffer; no queue family ownership transfer.
			const vk::VkBufferMemoryBarrier	barrier		=
			{
				vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
				DE_NULL,

				m_srcAccesses,
				m_dstAccesses,

				vk::VK_QUEUE_FAMILY_IGNORED,
				vk::VK_QUEUE_FAMILY_IGNORED,

				context.getBuffer(),
				0,
				vk::VK_WHOLE_SIZE
			};

			vkd.cmdPipelineBarrier(cmd, m_srcStages, m_dstStages, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &barrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
			break;
		}

		case TYPE_IMAGE:
		{
			// Same layout for old and new: a memory barrier on the image
			// without a layout transition; covers the single mip/layer.
			const vk::VkImageMemoryBarrier	barrier		=
			{
				vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
				DE_NULL,

				m_srcAccesses,
				m_dstAccesses,

				*m_imageLayout,
				*m_imageLayout,

				vk::VK_QUEUE_FAMILY_IGNORED,
				vk::VK_QUEUE_FAMILY_IGNORED,

				context.getImage(),
				{
					vk::VK_IMAGE_ASPECT_COLOR_BIT,
					0, 1,
					0, 1
				}
			};

			vkd.cmdPipelineBarrier(cmd, m_srcStages, m_dstStages, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);
			break;
		}

		default:
			DE_FATAL("Unknown pipeline barrier type");
	}
}
2072
// Transitions the test image from m_srcLayout to m_dstLayout with a single
// vkCmdPipelineBarrier(). Because a layout transition may rearrange the
// image contents, verify() marks the whole backing memory as undefined.
class ImageTransition : public CmdCommand
{
public:
						ImageTransition		(vk::VkPipelineStageFlags	srcStages,
											 vk::VkAccessFlags			srcAccesses,

											 vk::VkPipelineStageFlags	dstStages,
											 vk::VkAccessFlags			dstAccesses,

											 vk::VkImageLayout			srcLayout,
											 vk::VkImageLayout			dstLayout);

						~ImageTransition	(void) {}
	const char*			getName				(void) const { return "ImageTransition"; }

	void				prepare				(PrepareContext& context);
	void				logSubmit			(TestLog& log, size_t commandIndex) const;
	void				submit				(SubmitContext& context);
	void				verify				(VerifyContext& context, size_t);

private:
	const vk::VkPipelineStageFlags	m_srcStages;
	const vk::VkAccessFlags			m_srcAccesses;
	const vk::VkPipelineStageFlags	m_dstStages;
	const vk::VkAccessFlags			m_dstAccesses;
	const vk::VkImageLayout			m_srcLayout;
	const vk::VkImageLayout			m_dstLayout;

	// Size of the image backing memory; recorded in prepare() and used by
	// verify() to invalidate the whole range in the reference.
	vk::VkDeviceSize				m_imageMemorySize;
};
2103
// Stores the stage/access masks and the source/destination layouts of the
// transition; the barrier itself is recorded in submit().
ImageTransition::ImageTransition (vk::VkPipelineStageFlags	srcStages,
								  vk::VkAccessFlags			srcAccesses,

								  vk::VkPipelineStageFlags	dstStages,
								  vk::VkAccessFlags			dstAccesses,

								  vk::VkImageLayout			srcLayout,
								  vk::VkImageLayout			dstLayout)
	: m_srcStages		(srcStages)
	, m_srcAccesses		(srcAccesses)
	, m_dstStages		(dstStages)
	, m_dstAccesses		(dstAccesses)
	, m_srcLayout		(srcLayout)
	, m_dstLayout		(dstLayout)
{
}
2120
2121void ImageTransition::logSubmit (TestLog& log, size_t commandIndex) const
2122{
2123	log << TestLog::Message << commandIndex << ":" << getName()
2124		<< " Image transition pipeline barrier"
2125		<< ", srcStages: " << vk::getPipelineStageFlagsStr(m_srcStages) << ", srcAccesses: " << vk::getAccessFlagsStr(m_srcAccesses)
2126		<< ", dstStages: " << vk::getPipelineStageFlagsStr(m_dstStages) << ", dstAccesses: " << vk::getAccessFlagsStr(m_dstAccesses)
2127		<< ", srcLayout: " << m_srcLayout << ", dstLayout: " << m_dstLayout << TestLog::EndMessage;
2128}
2129
// Validates the transition against the tracked layout and updates the
// tracked state so later commands see the new layout.
void ImageTransition::prepare (PrepareContext& context)
{
	// The transition is only valid if the image currently is in m_srcLayout,
	// or either side of the transition is UNDEFINED (contents discardable).
	DE_ASSERT(context.getImageLayout() == vk::VK_IMAGE_LAYOUT_UNDEFINED || m_srcLayout == vk::VK_IMAGE_LAYOUT_UNDEFINED || context.getImageLayout() == m_srcLayout);

	context.setImageLayout(m_dstLayout);
	// Remember the memory size so verify() can mark the whole range undefined.
	m_imageMemorySize = context.getImageMemorySize();
}
2137
// Records the image layout transition as a single image memory barrier
// covering mip level 0 / layer 0 of the color aspect.
void ImageTransition::submit (SubmitContext& context)
{
	const vk::DeviceInterface&		vkd			= context.getContext().getDeviceInterface();
	const vk::VkCommandBuffer		cmd			= context.getCommandBuffer();
	const vk::VkImageMemoryBarrier	barrier		=
	{
		vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
		DE_NULL,

		m_srcAccesses,
		m_dstAccesses,

		m_srcLayout,
		m_dstLayout,

		vk::VK_QUEUE_FAMILY_IGNORED,
		vk::VK_QUEUE_FAMILY_IGNORED,

		context.getImage(),
		{
			vk::VK_IMAGE_ASPECT_COLOR_BIT,
			0u, 1u,
			0u, 1u
		}
	};

	vkd.cmdPipelineBarrier(cmd, m_srcStages, m_dstStages, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);
}
2166
2167void ImageTransition::verify (VerifyContext& context, size_t)
2168{
2169	context.getReference().setUndefined(0, (size_t)m_imageMemorySize);
2170}
2171
// Fills the test buffer with a constant 32-bit value using vkCmdFillBuffer().
// The fill size is rounded down to a multiple of 4 bytes in submit().
class FillBuffer : public CmdCommand
{
public:
						FillBuffer	(deUint32 value) : m_value(value) {}
						~FillBuffer	(void) {}
	const char*			getName		(void) const { return "FillBuffer"; }

	void				logSubmit	(TestLog& log, size_t commandIndex) const;
	void				submit		(SubmitContext& context);
	void				verify		(VerifyContext& context, size_t commandIndex);

private:
	const deUint32		m_value;
	// Number of bytes actually filled; recorded in submit() for verify().
	vk::VkDeviceSize	m_bufferSize;
};
2187
2188void FillBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2189{
2190	log << TestLog::Message << commandIndex << ":" << getName() << " Fill value: " << m_value << TestLog::EndMessage;
2191}
2192
2193void FillBuffer::submit (SubmitContext& context)
2194{
2195	const vk::DeviceInterface&	vkd			= context.getContext().getDeviceInterface();
2196	const vk::VkCommandBuffer	cmd			= context.getCommandBuffer();
2197	const vk::VkBuffer			buffer		= context.getBuffer();
2198	const vk::VkDeviceSize		sizeMask	= ~(0x3ull); // \note Round down to multiple of 4
2199
2200	m_bufferSize = sizeMask & context.getBufferSize();
2201	vkd.cmdFillBuffer(cmd, buffer, 0, m_bufferSize, m_value);
2202}
2203
2204void FillBuffer::verify (VerifyContext& context, size_t)
2205{
2206	ReferenceMemory&	reference	= context.getReference();
2207
2208	for (size_t ndx = 0; ndx < m_bufferSize; ndx++)
2209	{
2210#if (DE_ENDIANNESS == DE_LITTLE_ENDIAN)
2211		reference.set(ndx, (deUint8)(0xffu & (m_value >> (8*(ndx % 4)))));
2212#else
2213		reference.set(ndx, (deUint8)(0xffu & (m_value >> (8*(3 - (ndx % 4))))));
2214#endif
2215	}
2216}
2217
// Overwrites the whole test buffer with seed-derived pseudo-random bytes
// using vkCmdUpdateBuffer() in 64KiB chunks.
class UpdateBuffer : public CmdCommand
{
public:
						UpdateBuffer	(deUint32 seed) : m_seed(seed) {}
						~UpdateBuffer	(void) {}
	const char*			getName			(void) const { return "UpdateBuffer"; }

	void				logSubmit		(TestLog& log, size_t commandIndex) const;
	void				submit			(SubmitContext& context);
	void				verify			(VerifyContext& context, size_t commandIndex);

private:
	// Seed for de::Random; verify() replays the same stream.
	const deUint32		m_seed;
	// Total number of bytes written; recorded in submit() for verify().
	vk::VkDeviceSize	m_bufferSize;
};
2233
2234void UpdateBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2235{
2236	log << TestLog::Message << commandIndex << ":" << getName() << " Update buffer, seed: " << m_seed << TestLog::EndMessage;
2237}
2238
2239void UpdateBuffer::submit (SubmitContext& context)
2240{
2241	const vk::DeviceInterface&	vkd			= context.getContext().getDeviceInterface();
2242	const vk::VkCommandBuffer	cmd			= context.getCommandBuffer();
2243	const vk::VkBuffer			buffer		= context.getBuffer();
2244	const size_t				blockSize	= 65536;
2245	std::vector<deUint8>		data		(blockSize, 0);
2246	de::Random					rng			(m_seed);
2247
2248	m_bufferSize = context.getBufferSize();
2249
2250	for (size_t updated = 0; updated < m_bufferSize; updated += blockSize)
2251	{
2252		for (size_t ndx = 0; ndx < data.size(); ndx++)
2253			data[ndx] = rng.getUint8();
2254
2255		if (m_bufferSize - updated > blockSize)
2256			vkd.cmdUpdateBuffer(cmd, buffer, updated, blockSize, (const deUint32*)(&data[0]));
2257		else
2258			vkd.cmdUpdateBuffer(cmd, buffer, updated, m_bufferSize - updated, (const deUint32*)(&data[0]));
2259	}
2260}
2261
2262void UpdateBuffer::verify (VerifyContext& context, size_t)
2263{
2264	ReferenceMemory&	reference	= context.getReference();
2265	const size_t		blockSize	= 65536;
2266	vector<deUint8>		data		(blockSize, 0);
2267	de::Random			rng			(m_seed);
2268
2269	for (size_t updated = 0; updated < m_bufferSize; updated += blockSize)
2270	{
2271		for (size_t ndx = 0; ndx < data.size(); ndx++)
2272			data[ndx] = rng.getUint8();
2273
2274		if (m_bufferSize - updated > blockSize)
2275			reference.setData(updated, blockSize, &data[0]);
2276		else
2277			reference.setData(updated, (size_t)(m_bufferSize - updated), &data[0]);
2278	}
2279}
2280
// Copies the test buffer into a freshly allocated host-visible destination
// buffer, then maps that buffer in verify() and compares every defined byte
// against the reference memory.
class BufferCopyToBuffer : public CmdCommand
{
public:
									BufferCopyToBuffer	(void) {}
									~BufferCopyToBuffer	(void) {}
	const char*						getName				(void) const { return "BufferCopyToBuffer"; }

	void							logPrepare			(TestLog& log, size_t commandIndex) const;
	void							prepare				(PrepareContext& context);
	void							logSubmit			(TestLog& log, size_t commandIndex) const;
	void							submit				(SubmitContext& context);
	void							verify				(VerifyContext& context, size_t commandIndex);

private:
	// Size of the test buffer; recorded in prepare().
	vk::VkDeviceSize				m_bufferSize;
	// Host-visible destination buffer and its memory, created in prepare().
	vk::Move<vk::VkBuffer>			m_dstBuffer;
	vk::Move<vk::VkDeviceMemory>	m_memory;
};
2299
2300void BufferCopyToBuffer::logPrepare (TestLog& log, size_t commandIndex) const
2301{
2302	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination buffer for buffer to buffer copy." << TestLog::EndMessage;
2303}
2304
// Creates a destination buffer the same size as the test buffer and backs
// it with host-visible memory so verify() can map and read the copy result.
void BufferCopyToBuffer::prepare (PrepareContext& context)
{
	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
	const vk::VkDevice				device			= context.getContext().getDevice();
	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();

	m_bufferSize = context.getBufferSize();

	m_dstBuffer	= createBuffer(vkd, device, m_bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies);
	m_memory	= bindBufferMemory(vki, vkd, physicalDevice, device, *m_dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
}
2318
2319void BufferCopyToBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2320{
2321	log << TestLog::Message << commandIndex << ":" << getName() << " Copy buffer to another buffer" << TestLog::EndMessage;
2322}
2323
2324void BufferCopyToBuffer::submit (SubmitContext& context)
2325{
2326	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
2327	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
2328	const vk::VkBufferCopy		range			=
2329	{
2330		0, 0, // Offsets
2331		m_bufferSize
2332	};
2333
2334	vkd.cmdCopyBuffer(commandBuffer, context.getBuffer(), *m_dstBuffer, 1, &range);
2335}
2336
2337void BufferCopyToBuffer::verify (VerifyContext& context, size_t commandIndex)
2338{
2339	tcu::ResultCollector&					resultCollector	(context.getResultCollector());
2340	ReferenceMemory&						reference		(context.getReference());
2341	const vk::DeviceInterface&				vkd				= context.getContext().getDeviceInterface();
2342	const vk::VkDevice						device			= context.getContext().getDevice();
2343	const vk::VkQueue						queue			= context.getContext().getQueue();
2344	const vk::VkCommandPool					commandPool		= context.getContext().getCommandPool();
2345	const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2346	const vk::VkBufferMemoryBarrier			barrier			=
2347	{
2348		vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
2349		DE_NULL,
2350
2351		vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2352		vk::VK_ACCESS_HOST_READ_BIT,
2353
2354		vk::VK_QUEUE_FAMILY_IGNORED,
2355		vk::VK_QUEUE_FAMILY_IGNORED,
2356		*m_dstBuffer,
2357		0,
2358		vk::VK_WHOLE_SIZE
2359	};
2360
2361	vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &barrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
2362
2363	VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
2364	queueRun(vkd, queue, *commandBuffer);
2365
2366	{
2367		void* const	ptr		= mapMemory(vkd, device, *m_memory, m_bufferSize);
2368		bool		isOk	= true;
2369
2370		vk::invalidateMappedMemoryRange(vkd, device, *m_memory, 0, m_bufferSize);
2371
2372		{
2373			const deUint8* const data = (const deUint8*)ptr;
2374
2375			for (size_t pos = 0; pos < (size_t)m_bufferSize; pos++)
2376			{
2377				if (reference.isDefined(pos))
2378				{
2379					if (data[pos] != reference.get(pos))
2380					{
2381						resultCollector.fail(
2382								de::toString(commandIndex) + ":" + getName()
2383								+ " Result differs from reference, Expected: "
2384								+ de::toString(tcu::toHex<8>(reference.get(pos)))
2385								+ ", Got: "
2386								+ de::toString(tcu::toHex<8>(data[pos]))
2387								+ ", At offset: "
2388								+ de::toString(pos));
2389						break;
2390					}
2391				}
2392			}
2393		}
2394
2395		vkd.unmapMemory(device, *m_memory);
2396
2397		if (!isOk)
2398			context.getLog() << TestLog::Message << commandIndex << ": Buffer copy to buffer verification failed" << TestLog::EndMessage;
2399	}
2400}
2401
// Fills a host-visible source buffer with seed-derived pseudo-random bytes
// in prepare() and copies it over the whole test buffer in submit();
// verify() replays the same random stream into the reference memory.
class BufferCopyFromBuffer : public CmdCommand
{
public:
									BufferCopyFromBuffer	(deUint32 seed) : m_seed(seed) {}
									~BufferCopyFromBuffer	(void) {}
	const char*						getName					(void) const { return "BufferCopyFromBuffer"; }

	void							logPrepare				(TestLog& log, size_t commandIndex) const;
	void							prepare					(PrepareContext& context);
	void							logSubmit				(TestLog& log, size_t commandIndex) const;
	void							submit					(SubmitContext& context);
	void							verify					(VerifyContext& context, size_t commandIndex);

private:
	// Seed for de::Random; verify() replays the same stream.
	const deUint32					m_seed;
	vk::VkDeviceSize				m_bufferSize;
	// Host-visible source buffer and its memory, created in prepare().
	vk::Move<vk::VkBuffer>			m_srcBuffer;
	vk::Move<vk::VkDeviceMemory>	m_memory;
};
2421
2422void BufferCopyFromBuffer::logPrepare (TestLog& log, size_t commandIndex) const
2423{
2424	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source buffer for buffer to buffer copy. Seed: " << m_seed << TestLog::EndMessage;
2425}
2426
// Creates a host-visible source buffer the same size as the test buffer and
// fills it with seed-derived pseudo-random bytes (map, write, flush, unmap).
void BufferCopyFromBuffer::prepare (PrepareContext& context)
{
	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
	const vk::VkDevice				device			= context.getContext().getDevice();
	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();

	m_bufferSize	= context.getBufferSize();
	m_srcBuffer		= createBuffer(vkd, device, m_bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies);
	m_memory		= bindBufferMemory(vki, vkd, physicalDevice, device, *m_srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);

	{
		void* const	ptr	= mapMemory(vkd, device, *m_memory, m_bufferSize);
		de::Random	rng	(m_seed);

		{
			deUint8* const	data = (deUint8*)ptr;

			for (size_t ndx = 0; ndx < (size_t)m_bufferSize; ndx++)
				data[ndx] = rng.getUint8();
		}

		// Flush so the host writes are visible to the device copy.
		vk::flushMappedMemoryRange(vkd, device, *m_memory, 0, m_bufferSize);
		vkd.unmapMemory(device, *m_memory);
	}
}
2454
2455void BufferCopyFromBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2456{
2457	log << TestLog::Message << commandIndex << ":" << getName() << " Copy buffer data from another buffer" << TestLog::EndMessage;
2458}
2459
2460void BufferCopyFromBuffer::submit (SubmitContext& context)
2461{
2462	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
2463	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
2464	const vk::VkBufferCopy		range			=
2465	{
2466		0, 0, // Offsets
2467		m_bufferSize
2468	};
2469
2470	vkd.cmdCopyBuffer(commandBuffer, *m_srcBuffer, context.getBuffer(), 1, &range);
2471}
2472
2473void BufferCopyFromBuffer::verify (VerifyContext& context, size_t)
2474{
2475	ReferenceMemory&	reference	(context.getReference());
2476	de::Random			rng			(m_seed);
2477
2478	for (size_t ndx = 0; ndx < (size_t)m_bufferSize; ndx++)
2479		reference.set(ndx, rng.getUint8());
2480}
2481
// Copies the test buffer into a temporary RGBA8 image; verify() copies the
// image back into a host-visible buffer and compares against the reference.
class BufferCopyToImage : public CmdCommand
{
public:
									BufferCopyToImage	(void) {}
									~BufferCopyToImage	(void) {}
	const char*						getName				(void) const { return "BufferCopyToImage"; }

	void							logPrepare			(TestLog& log, size_t commandIndex) const;
	void							prepare				(PrepareContext& context);
	void							logSubmit			(TestLog& log, size_t commandIndex) const;
	void							submit				(SubmitContext& context);
	void							verify				(VerifyContext& context, size_t commandIndex);

private:
	// Image dimensions chosen in prepare() so that width*height*4 matches
	// the buffer size (see findImageSizeWxHx4).
	deInt32							m_imageWidth;
	deInt32							m_imageHeight;
	vk::Move<vk::VkImage>			m_dstImage;
	vk::Move<vk::VkDeviceMemory>	m_memory;
};
2501
2502void BufferCopyToImage::logPrepare (TestLog& log, size_t commandIndex) const
2503{
2504	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination image for buffer to image copy." << TestLog::EndMessage;
2505}
2506
// Creates an optimal-tiling RGBA8 destination image sized from the buffer
// and transitions it to TRANSFER_DST_OPTIMAL so submit() can copy into it.
void BufferCopyToImage::prepare (PrepareContext& context)
{
	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
	const vk::VkDevice				device			= context.getContext().getDevice();
	const vk::VkQueue				queue			= context.getContext().getQueue();
	const vk::VkCommandPool			commandPool		= context.getContext().getCommandPool();
	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();
	// Helper picks (W, H) for a W x H x 4-byte image from the buffer size
	// (presumably W*H*4 covers the buffer — see findImageSizeWxHx4).
	const IVec2						imageSize		= findImageSizeWxHx4(context.getBufferSize());

	m_imageWidth	= imageSize[0];
	m_imageHeight	= imageSize[1];

	{
		const vk::VkImageCreateInfo	createInfo =
		{
			vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
			DE_NULL,

			0,
			vk::VK_IMAGE_TYPE_2D,
			vk::VK_FORMAT_R8G8B8A8_UNORM,
			{
				(deUint32)m_imageWidth,
				(deUint32)m_imageHeight,
				1u,
			},
			1, 1, // mipLevels, arrayLayers
			vk::VK_SAMPLE_COUNT_1_BIT,

			vk::VK_IMAGE_TILING_OPTIMAL,
			vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
			vk::VK_SHARING_MODE_EXCLUSIVE,

			(deUint32)queueFamilies.size(),
			&queueFamilies[0],
			vk::VK_IMAGE_LAYOUT_UNDEFINED
		};

		m_dstImage = vk::createImage(vkd, device, &createInfo);
	}

	m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_dstImage, 0);

	// Transition UNDEFINED -> TRANSFER_DST_OPTIMAL on a one-off command
	// buffer so the image is ready for the copy recorded in submit().
	{
		const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
		const vk::VkImageMemoryBarrier			barrier			=
		{
			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
			DE_NULL,

			0,
			vk::VK_ACCESS_TRANSFER_WRITE_BIT,

			vk::VK_IMAGE_LAYOUT_UNDEFINED,
			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,

			vk::VK_QUEUE_FAMILY_IGNORED,
			vk::VK_QUEUE_FAMILY_IGNORED,

			*m_dstImage,
			{
				vk::VK_IMAGE_ASPECT_COLOR_BIT,
				0,	// Mip level
				1,	// Mip level count
				0,	// Layer
				1	// Layer count
			}
		};

		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);

		VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
		queueRun(vkd, queue, *commandBuffer);
	}
}
2584
2585void BufferCopyToImage::logSubmit (TestLog& log, size_t commandIndex) const
2586{
2587	log << TestLog::Message << commandIndex << ":" << getName() << " Copy buffer to image" << TestLog::EndMessage;
2588}
2589
// Records a copy of the whole test buffer into mip 0 / layer 0 of the
// destination image (tightly packed: bufferRowLength/ImageHeight = 0).
void BufferCopyToImage::submit (SubmitContext& context)
{
	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
	const vk::VkBufferImageCopy	region			=
	{
		0,
		0, 0,
		{
			vk::VK_IMAGE_ASPECT_COLOR_BIT,
			0,	// mipLevel
			0,	// arrayLayer
			1	// layerCount
		},
		{ 0, 0, 0 },
		{
			(deUint32)m_imageWidth,
			(deUint32)m_imageHeight,
			1u
		}
	};

	vkd.cmdCopyBufferToImage(commandBuffer, context.getBuffer(), *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
}
2614
// Copies the image back into a temporary host-visible buffer and compares
// every defined byte against the reference memory. Sequence: transition the
// image DST->SRC, copy image to buffer, barrier transfer-write->host-read,
// then map and compare.
void BufferCopyToImage::verify (VerifyContext& context, size_t commandIndex)
{
	tcu::ResultCollector&					resultCollector	(context.getResultCollector());
	ReferenceMemory&						reference		(context.getReference());
	const vk::InstanceInterface&			vki				= context.getContext().getInstanceInterface();
	const vk::DeviceInterface&				vkd				= context.getContext().getDeviceInterface();
	const vk::VkPhysicalDevice				physicalDevice	= context.getContext().getPhysicalDevice();
	const vk::VkDevice						device			= context.getContext().getDevice();
	const vk::VkQueue						queue			= context.getContext().getQueue();
	const vk::VkCommandPool					commandPool		= context.getContext().getCommandPool();
	const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
	const vector<deUint32>&					queueFamilies	= context.getContext().getQueueFamilies();
	const vk::Unique<vk::VkBuffer>			dstBuffer		(createBuffer(vkd, device, 4 * m_imageWidth * m_imageHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
	const vk::Unique<vk::VkDeviceMemory>	memory			(bindBufferMemory(vki, vkd, physicalDevice, device, *dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
	{
		// Make the earlier copy visible and move the image to a readable layout.
		const vk::VkImageMemoryBarrier		imageBarrier	=
		{
			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
			DE_NULL,

			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
			vk::VK_ACCESS_TRANSFER_READ_BIT,

			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
			vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,

			vk::VK_QUEUE_FAMILY_IGNORED,
			vk::VK_QUEUE_FAMILY_IGNORED,

			*m_dstImage,
			{
				vk::VK_IMAGE_ASPECT_COLOR_BIT,
				0,	// Mip level
				1,	// Mip level count
				0,	// Layer
				1	// Layer count
			}
		};
		// Make the readback copy visible to host reads before mapping.
		const vk::VkBufferMemoryBarrier bufferBarrier =
		{
			vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
			DE_NULL,

			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
			vk::VK_ACCESS_HOST_READ_BIT,

			vk::VK_QUEUE_FAMILY_IGNORED,
			vk::VK_QUEUE_FAMILY_IGNORED,
			*dstBuffer,
			0,
			vk::VK_WHOLE_SIZE
		};

		const vk::VkBufferImageCopy	region =
		{
			0,
			0, 0,
			{
				vk::VK_IMAGE_ASPECT_COLOR_BIT,
				0,	// mipLevel
				0,	// arrayLayer
				1	// layerCount
			},
			{ 0, 0, 0 },
			{
				(deUint32)m_imageWidth,
				(deUint32)m_imageHeight,
				1u
			}
		};

		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrier);
		vkd.cmdCopyImageToBuffer(*commandBuffer, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *dstBuffer, 1, &region);
		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &bufferBarrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
	}

	VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
	queueRun(vkd, queue, *commandBuffer);

	{
		void* const	ptr		= mapMemory(vkd, device, *memory, 4 * m_imageWidth * m_imageHeight);

		vk::invalidateMappedMemoryRange(vkd, device, *memory, 0,  4 * m_imageWidth * m_imageHeight);

		{
			const deUint8* const	data = (const deUint8*)ptr;

			// Only compare bytes whose reference value is defined; stop at
			// the first mismatch.
			for (size_t pos = 0; pos < (size_t)( 4 * m_imageWidth * m_imageHeight); pos++)
			{
				if (reference.isDefined(pos))
				{
					if (data[pos] != reference.get(pos))
					{
						resultCollector.fail(
								de::toString(commandIndex) + ":" + getName()
								+ " Result differs from reference, Expected: "
								+ de::toString(tcu::toHex<8>(reference.get(pos)))
								+ ", Got: "
								+ de::toString(tcu::toHex<8>(data[pos]))
								+ ", At offset: "
								+ de::toString(pos));
						break;
					}
				}
			}
		}

		vkd.unmapMemory(device, *memory);
	}
}
2725
// Creates an RGBA8 image filled with seed-derived pseudo-random data in
// prepare() and copies it into the test buffer in submit(); verify()
// replays the same random stream into the reference memory.
class BufferCopyFromImage : public CmdCommand
{
public:
									BufferCopyFromImage		(deUint32 seed) : m_seed(seed) {}
									~BufferCopyFromImage	(void) {}
	const char*						getName					(void) const { return "BufferCopyFromImage"; }

	void							logPrepare				(TestLog& log, size_t commandIndex) const;
	void							prepare					(PrepareContext& context);
	void							logSubmit				(TestLog& log, size_t commandIndex) const;
	void							submit					(SubmitContext& context);
	void							verify					(VerifyContext& context, size_t commandIndex);

private:
	// Seed for de::Random; verify() replays the same stream.
	const deUint32					m_seed;
	// Image dimensions chosen in prepare() (see findImageSizeWxHx4).
	deInt32							m_imageWidth;
	deInt32							m_imageHeight;
	vk::Move<vk::VkImage>			m_srcImage;
	vk::Move<vk::VkDeviceMemory>	m_memory;
};
2746
2747void BufferCopyFromImage::logPrepare (TestLog& log, size_t commandIndex) const
2748{
2749	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source image for image to buffer copy." << TestLog::EndMessage;
2750}
2751
// Creates an optimal-tiling RGBA8 source image, uploads seed-derived
// pseudo-random contents through a staging buffer, and leaves the image in
// TRANSFER_SRC_OPTIMAL so submit() can copy from it.
void BufferCopyFromImage::prepare (PrepareContext& context)
{
	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
	const vk::VkDevice				device			= context.getContext().getDevice();
	const vk::VkQueue				queue			= context.getContext().getQueue();
	const vk::VkCommandPool			commandPool		= context.getContext().getCommandPool();
	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();
	// Helper picks (W, H) for a W x H x 4-byte image from the buffer size.
	const IVec2						imageSize		= findImageSizeWxHx4(context.getBufferSize());

	m_imageWidth	= imageSize[0];
	m_imageHeight	= imageSize[1];

	{
		const vk::VkImageCreateInfo	createInfo =
		{
			vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
			DE_NULL,

			0,
			vk::VK_IMAGE_TYPE_2D,
			vk::VK_FORMAT_R8G8B8A8_UNORM,
			{
				(deUint32)m_imageWidth,
				(deUint32)m_imageHeight,
				1u,
			},
			1, 1, // mipLevels, arrayLayers
			vk::VK_SAMPLE_COUNT_1_BIT,

			vk::VK_IMAGE_TILING_OPTIMAL,
			vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
			vk::VK_SHARING_MODE_EXCLUSIVE,

			(deUint32)queueFamilies.size(),
			&queueFamilies[0],
			vk::VK_IMAGE_LAYOUT_UNDEFINED
		};

		m_srcImage = vk::createImage(vkd, device, &createInfo);
	}

	m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_srcImage, 0);

	{
		// Staging buffer used only during preparation; freed on scope exit.
		const vk::Unique<vk::VkBuffer>			srcBuffer		(createBuffer(vkd, device, 4 * m_imageWidth * m_imageHeight, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
		const vk::Unique<vk::VkDeviceMemory>	memory			(bindBufferMemory(vki, vkd, physicalDevice, device, *srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
		const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
		// UNDEFINED -> TRANSFER_DST_OPTIMAL before the staging upload.
		const vk::VkImageMemoryBarrier			preImageBarrier	=
		{
			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
			DE_NULL,

			0,
			vk::VK_ACCESS_TRANSFER_WRITE_BIT,

			vk::VK_IMAGE_LAYOUT_UNDEFINED,
			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,

			vk::VK_QUEUE_FAMILY_IGNORED,
			vk::VK_QUEUE_FAMILY_IGNORED,

			*m_srcImage,
			{
				vk::VK_IMAGE_ASPECT_COLOR_BIT,
				0,	// Mip level
				1,	// Mip level count
				0,	// Layer
				1	// Layer count
			}
		};
		// TRANSFER_DST_OPTIMAL -> TRANSFER_SRC_OPTIMAL after the upload.
		const vk::VkImageMemoryBarrier			postImageBarrier =
		{
			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
			DE_NULL,

			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
			0,

			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
			vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,

			vk::VK_QUEUE_FAMILY_IGNORED,
			vk::VK_QUEUE_FAMILY_IGNORED,

			*m_srcImage,
			{
				vk::VK_IMAGE_ASPECT_COLOR_BIT,
				0,	// Mip level
				1,	// Mip level count
				0,	// Layer
				1	// Layer count
			}
		};
		const vk::VkBufferImageCopy				region				=
		{
			0,
			0, 0,
			{
				vk::VK_IMAGE_ASPECT_COLOR_BIT,
				0,	// mipLevel
				0,	// arrayLayer
				1	// layerCount
			},
			{ 0, 0, 0 },
			{
				(deUint32)m_imageWidth,
				(deUint32)m_imageHeight,
				1u
			}
		};

		// Fill the staging buffer with the seeded pseudo-random pattern.
		{
			void* const	ptr	= mapMemory(vkd, device, *memory, 4 * m_imageWidth * m_imageHeight);
			de::Random	rng	(m_seed);

			{
				deUint8* const	data = (deUint8*)ptr;

				for (size_t ndx = 0; ndx < (size_t)(4 * m_imageWidth * m_imageHeight); ndx++)
					data[ndx] = rng.getUint8();
			}

			vk::flushMappedMemoryRange(vkd, device, *memory, 0, 4 * m_imageWidth * m_imageHeight);
			vkd.unmapMemory(device, *memory);
		}

		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &preImageBarrier);
		vkd.cmdCopyBufferToImage(*commandBuffer, *srcBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &postImageBarrier);

		VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
		queueRun(vkd, queue, *commandBuffer);
	}
}
2888
2889void BufferCopyFromImage::logSubmit (TestLog& log, size_t commandIndex) const
2890{
2891	log << TestLog::Message << commandIndex << ":" << getName() << " Copy buffer data from image" << TestLog::EndMessage;
2892}
2893
2894void BufferCopyFromImage::submit (SubmitContext& context)
2895{
2896	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
2897	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
2898	const vk::VkBufferImageCopy	region			=
2899	{
2900		0,
2901		0, 0,
2902		{
2903			vk::VK_IMAGE_ASPECT_COLOR_BIT,
2904			0,	// mipLevel
2905			0,	// arrayLayer
2906			1	// layerCount
2907		},
2908		{ 0, 0, 0 },
2909		{
2910			(deUint32)m_imageWidth,
2911			(deUint32)m_imageHeight,
2912			1u
2913		}
2914	};
2915
2916	vkd.cmdCopyImageToBuffer(commandBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, context.getBuffer(), 1, &region);
2917}
2918
2919void BufferCopyFromImage::verify (VerifyContext& context, size_t)
2920{
2921	ReferenceMemory&	reference		(context.getReference());
2922	de::Random			rng	(m_seed);
2923
2924	for (size_t ndx = 0; ndx < (size_t)(4 * m_imageWidth * m_imageHeight); ndx++)
2925		reference.set(ndx, rng.getUint8());
2926}
2927
2928class ImageCopyToBuffer : public CmdCommand
2929{
2930public:
2931									ImageCopyToBuffer	(vk::VkImageLayout imageLayout) : m_imageLayout (imageLayout) {}
2932									~ImageCopyToBuffer	(void) {}
2933	const char*						getName				(void) const { return "BufferCopyToImage"; }
2934
2935	void							logPrepare			(TestLog& log, size_t commandIndex) const;
2936	void							prepare				(PrepareContext& context);
2937	void							logSubmit			(TestLog& log, size_t commandIndex) const;
2938	void							submit				(SubmitContext& context);
2939	void							verify				(VerifyContext& context, size_t commandIndex);
2940
2941private:
2942	vk::VkImageLayout				m_imageLayout;
2943	vk::VkDeviceSize				m_bufferSize;
2944	vk::Move<vk::VkBuffer>			m_dstBuffer;
2945	vk::Move<vk::VkDeviceMemory>	m_memory;
2946	vk::VkDeviceSize				m_imageMemorySize;
2947	deInt32							m_imageWidth;
2948	deInt32							m_imageHeight;
2949};
2950
2951void ImageCopyToBuffer::logPrepare (TestLog& log, size_t commandIndex) const
2952{
2953	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination buffer for image to buffer copy." << TestLog::EndMessage;
2954}
2955
2956void ImageCopyToBuffer::prepare (PrepareContext& context)
2957{
2958	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
2959	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
2960	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
2961	const vk::VkDevice				device			= context.getContext().getDevice();
2962	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();
2963
2964	m_imageWidth		= context.getImageWidth();
2965	m_imageHeight		= context.getImageHeight();
2966	m_bufferSize		= 4 * m_imageWidth * m_imageHeight;
2967	m_imageMemorySize	= context.getImageMemorySize();
2968	m_dstBuffer			= createBuffer(vkd, device, m_bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies);
2969	m_memory			= bindBufferMemory(vki, vkd, physicalDevice, device, *m_dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
2970}
2971
2972void ImageCopyToBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2973{
2974	log << TestLog::Message << commandIndex << ":" << getName() << " Copy image to buffer" << TestLog::EndMessage;
2975}
2976
2977void ImageCopyToBuffer::submit (SubmitContext& context)
2978{
2979	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
2980	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
2981	const vk::VkBufferImageCopy	region			=
2982	{
2983		0,
2984		0, 0,
2985		{
2986			vk::VK_IMAGE_ASPECT_COLOR_BIT,
2987			0,	// mipLevel
2988			0,	// arrayLayer
2989			1	// layerCount
2990		},
2991		{ 0, 0, 0 },
2992		{
2993			(deUint32)m_imageWidth,
2994			(deUint32)m_imageHeight,
2995			1u
2996		}
2997	};
2998
2999	vkd.cmdCopyImageToBuffer(commandBuffer, context.getImage(), m_imageLayout, *m_dstBuffer, 1, &region);
3000}
3001
void ImageCopyToBuffer::verify (VerifyContext& context, size_t commandIndex)
{
	// Make the copied-out buffer host-readable, map it, and compare its
	// contents pixel-by-pixel against the reference image.
	tcu::ResultCollector&					resultCollector	(context.getResultCollector());
	ReferenceMemory&						reference		(context.getReference());
	const vk::DeviceInterface&				vkd				= context.getContext().getDeviceInterface();
	const vk::VkDevice						device			= context.getContext().getDevice();
	const vk::VkQueue						queue			= context.getContext().getQueue();
	const vk::VkCommandPool					commandPool		= context.getContext().getCommandPool();
	const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
	// Transfer write -> host read over the whole destination buffer.
	const vk::VkBufferMemoryBarrier			barrier			=
	{
		vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
		DE_NULL,

		vk::VK_ACCESS_TRANSFER_WRITE_BIT,
		vk::VK_ACCESS_HOST_READ_BIT,

		vk::VK_QUEUE_FAMILY_IGNORED,
		vk::VK_QUEUE_FAMILY_IGNORED,
		*m_dstBuffer,
		0,
		vk::VK_WHOLE_SIZE
	};

	vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &barrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);

	VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
	queueRun(vkd, queue, *commandBuffer);

	// NOTE(review): marks the entire image memory as undefined in the
	// byte-level reference — presumably because the raw contents of the
	// (opaquely tiled) image are no longer tracked; confirm against
	// ReferenceMemory's usage elsewhere in this file.
	reference.setUndefined(0, (size_t)m_imageMemorySize);
	{
		void* const						ptr				= mapMemory(vkd, device, *m_memory, m_bufferSize);
		const ConstPixelBufferAccess	referenceImage	(context.getReferenceImage().getAccess());
		// Interpret the mapped buffer as a tightly packed RGBA8 image.
		const ConstPixelBufferAccess	resultImage		(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_imageWidth, m_imageHeight, 1, ptr);

		// Invalidate before the comparison actually reads the mapped range.
		vk::invalidateMappedMemoryRange(vkd, device, *m_memory, 0, m_bufferSize);

		// Exact (zero-threshold) comparison; failures are logged and recorded.
		if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), referenceImage, resultImage, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
			resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");

		vkd.unmapMemory(device, *m_memory);
	}
}
3045
// Command that fills a host-visible buffer with seeded pseudo-random bytes
// and copies it into the test image.
class ImageCopyFromBuffer : public CmdCommand
{
public:
									ImageCopyFromBuffer		(deUint32 seed, vk::VkImageLayout imageLayout) : m_seed(seed), m_imageLayout(imageLayout) {}
									~ImageCopyFromBuffer	(void) {}
	const char*						getName					(void) const { return "ImageCopyFromBuffer"; }

	void							logPrepare				(TestLog& log, size_t commandIndex) const;
	void							prepare					(PrepareContext& context);
	void							logSubmit				(TestLog& log, size_t commandIndex) const;
	void							submit					(SubmitContext& context);
	void							verify					(VerifyContext& context, size_t commandIndex);

private:
	const deUint32					m_seed;				// RNG seed shared by prepare() and verify()
	const vk::VkImageLayout			m_imageLayout;		// Layout the test image is in when copied into
	deInt32							m_imageWidth;
	deInt32							m_imageHeight;
	vk::VkDeviceSize				m_imageMemorySize;
	vk::VkDeviceSize				m_bufferSize;		// 4 bytes per pixel
	vk::Move<vk::VkBuffer>			m_srcBuffer;		// Host-visible source buffer
	vk::Move<vk::VkDeviceMemory>	m_memory;			// Memory bound to m_srcBuffer
};
3069
3070void ImageCopyFromBuffer::logPrepare (TestLog& log, size_t commandIndex) const
3071{
3072	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source buffer for buffer to image copy. Seed: " << m_seed << TestLog::EndMessage;
3073}
3074
3075void ImageCopyFromBuffer::prepare (PrepareContext& context)
3076{
3077	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
3078	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
3079	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
3080	const vk::VkDevice				device			= context.getContext().getDevice();
3081	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();
3082
3083	m_imageWidth		= context.getImageHeight();
3084	m_imageHeight		= context.getImageWidth();
3085	m_imageMemorySize	= context.getImageMemorySize();
3086	m_bufferSize		= m_imageWidth * m_imageHeight * 4;
3087	m_srcBuffer			= createBuffer(vkd, device, m_bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies);
3088	m_memory			= bindBufferMemory(vki, vkd, physicalDevice, device, *m_srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
3089
3090	{
3091		void* const	ptr	= mapMemory(vkd, device, *m_memory, m_bufferSize);
3092		de::Random	rng	(m_seed);
3093
3094		{
3095			deUint8* const	data = (deUint8*)ptr;
3096
3097			for (size_t ndx = 0; ndx < (size_t)m_bufferSize; ndx++)
3098				data[ndx] = rng.getUint8();
3099		}
3100
3101		vk::flushMappedMemoryRange(vkd, device, *m_memory, 0, m_bufferSize);
3102		vkd.unmapMemory(device, *m_memory);
3103	}
3104}
3105
3106void ImageCopyFromBuffer::logSubmit (TestLog& log, size_t commandIndex) const
3107{
3108	log << TestLog::Message << commandIndex << ":" << getName() << " Copy image data from buffer" << TestLog::EndMessage;
3109}
3110
3111void ImageCopyFromBuffer::submit (SubmitContext& context)
3112{
3113	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
3114	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
3115	const vk::VkBufferImageCopy	region			=
3116	{
3117		0,
3118		0, 0,
3119		{
3120			vk::VK_IMAGE_ASPECT_COLOR_BIT,
3121			0,	// mipLevel
3122			0,	// arrayLayer
3123			1	// layerCount
3124		},
3125		{ 0, 0, 0 },
3126		{
3127			(deUint32)m_imageWidth,
3128			(deUint32)m_imageHeight,
3129			1u
3130		}
3131	};
3132
3133	vkd.cmdCopyBufferToImage(commandBuffer, *m_srcBuffer, context.getImage(), m_imageLayout, 1, &region);
3134}
3135
3136void ImageCopyFromBuffer::verify (VerifyContext& context, size_t)
3137{
3138	ReferenceMemory&	reference	(context.getReference());
3139	de::Random			rng			(m_seed);
3140
3141	reference.setUndefined(0, (size_t)m_imageMemorySize);
3142
3143	{
3144		const PixelBufferAccess&	refAccess	(context.getReferenceImage().getAccess());
3145
3146		for (deInt32 y = 0; y < m_imageHeight; y++)
3147		for (deInt32 x = 0; x < m_imageWidth; x++)
3148		{
3149			const deUint8 r8 = rng.getUint8();
3150			const deUint8 g8 = rng.getUint8();
3151			const deUint8 b8 = rng.getUint8();
3152			const deUint8 a8 = rng.getUint8();
3153
3154			refAccess.setPixel(UVec4(r8, g8, b8, a8), x, y);
3155		}
3156	}
3157}
3158
// Command that creates a second image filled with seeded pseudo-random data
// and copies it into the test image.
class ImageCopyFromImage : public CmdCommand
{
public:
									ImageCopyFromImage	(deUint32 seed, vk::VkImageLayout imageLayout) : m_seed(seed), m_imageLayout(imageLayout) {}
									~ImageCopyFromImage	(void) {}
	const char*						getName				(void) const { return "ImageCopyFromImage"; }

	void							logPrepare			(TestLog& log, size_t commandIndex) const;
	void							prepare				(PrepareContext& context);
	void							logSubmit			(TestLog& log, size_t commandIndex) const;
	void							submit				(SubmitContext& context);
	void							verify				(VerifyContext& context, size_t commandIndex);

private:
	const deUint32					m_seed;				// RNG seed shared by prepare() and verify()
	const vk::VkImageLayout			m_imageLayout;		// Layout of the destination (test) image
	deInt32							m_imageWidth;
	deInt32							m_imageHeight;
	vk::VkDeviceSize				m_imageMemorySize;
	vk::Move<vk::VkImage>			m_srcImage;			// Source image created in prepare()
	vk::Move<vk::VkDeviceMemory>	m_memory;			// Memory bound to m_srcImage
};
3181
3182void ImageCopyFromImage::logPrepare (TestLog& log, size_t commandIndex) const
3183{
3184	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source image for image to image copy." << TestLog::EndMessage;
3185}
3186
void ImageCopyFromImage::prepare (PrepareContext& context)
{
	// Create the source image, upload seeded pseudo-random data into it via a
	// staging buffer, and leave it in TRANSFER_SRC_OPTIMAL layout ready for
	// submit().
	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
	const vk::VkDevice				device			= context.getContext().getDevice();
	const vk::VkQueue				queue			= context.getContext().getQueue();
	const vk::VkCommandPool			commandPool		= context.getContext().getCommandPool();
	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();

	m_imageWidth		= context.getImageWidth();
	m_imageHeight		= context.getImageHeight();
	m_imageMemorySize	= context.getImageMemorySize();

	// Source image: 2D RGBA8, optimal tiling, usable as both transfer source
	// and destination (destination for the staging upload below).
	{
		const vk::VkImageCreateInfo	createInfo =
		{
			vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
			DE_NULL,

			0,
			vk::VK_IMAGE_TYPE_2D,
			vk::VK_FORMAT_R8G8B8A8_UNORM,
			{
				(deUint32)m_imageWidth,
				(deUint32)m_imageHeight,
				1u,
			},
			1, 1, // mipLevels, arrayLayers
			vk::VK_SAMPLE_COUNT_1_BIT,

			vk::VK_IMAGE_TILING_OPTIMAL,
			vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
			vk::VK_SHARING_MODE_EXCLUSIVE,

			(deUint32)queueFamilies.size(),
			&queueFamilies[0],
			vk::VK_IMAGE_LAYOUT_UNDEFINED
		};

		m_srcImage = vk::createImage(vkd, device, &createInfo);
	}

	m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_srcImage, 0);

	// Staging upload: fill a host-visible buffer with RNG data and copy it
	// into the source image with the required layout transitions.
	{
		const vk::Unique<vk::VkBuffer>			srcBuffer		(createBuffer(vkd, device, 4 * m_imageWidth * m_imageHeight, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
		const vk::Unique<vk::VkDeviceMemory>	memory			(bindBufferMemory(vki, vkd, physicalDevice, device, *srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
		const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
		// UNDEFINED -> TRANSFER_DST_OPTIMAL before the upload copy.
		const vk::VkImageMemoryBarrier			preImageBarrier	=
		{
			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
			DE_NULL,

			0,
			vk::VK_ACCESS_TRANSFER_WRITE_BIT,

			vk::VK_IMAGE_LAYOUT_UNDEFINED,
			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,

			vk::VK_QUEUE_FAMILY_IGNORED,
			vk::VK_QUEUE_FAMILY_IGNORED,

			*m_srcImage,
			{
				vk::VK_IMAGE_ASPECT_COLOR_BIT,
				0,	// Mip level
				1,	// Mip level count
				0,	// Layer
				1	// Layer count
			}
		};
		// TRANSFER_DST_OPTIMAL -> TRANSFER_SRC_OPTIMAL after the upload, so
		// submit() can read from the image.
		const vk::VkImageMemoryBarrier			postImageBarrier =
		{
			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
			DE_NULL,

			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
			0,

			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
			vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,

			vk::VK_QUEUE_FAMILY_IGNORED,
			vk::VK_QUEUE_FAMILY_IGNORED,

			*m_srcImage,
			{
				vk::VK_IMAGE_ASPECT_COLOR_BIT,
				0,	// Mip level
				1,	// Mip level count
				0,	// Layer
				1	// Layer count
			}
		};
		// Whole-image, tightly packed copy region.
		const vk::VkBufferImageCopy				region				=
		{
			0,
			0, 0,
			{
				vk::VK_IMAGE_ASPECT_COLOR_BIT,
				0,	// mipLevel
				0,	// arrayLayer
				1	// layerCount
			},
			{ 0, 0, 0 },
			{
				(deUint32)m_imageWidth,
				(deUint32)m_imageHeight,
				1u
			}
		};

		// Fill the staging buffer; verify() replays the same seed.
		{
			void* const	ptr	= mapMemory(vkd, device, *memory, 4 * m_imageWidth * m_imageHeight);
			de::Random	rng	(m_seed);

			{
				deUint8* const	data = (deUint8*)ptr;

				for (size_t ndx = 0; ndx < (size_t)(4 * m_imageWidth * m_imageHeight); ndx++)
					data[ndx] = rng.getUint8();
			}

			vk::flushMappedMemoryRange(vkd, device, *memory, 0, 4 * m_imageWidth * m_imageHeight);
			vkd.unmapMemory(device, *memory);
		}

		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &preImageBarrier);
		vkd.cmdCopyBufferToImage(*commandBuffer, *srcBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &postImageBarrier);

		// Submit and wait so the staging resources can be destroyed on scope exit.
		VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
		queueRun(vkd, queue, *commandBuffer);
	}
}
3323
3324void ImageCopyFromImage::logSubmit (TestLog& log, size_t commandIndex) const
3325{
3326	log << TestLog::Message << commandIndex << ":" << getName() << " Copy image data from another image" << TestLog::EndMessage;
3327}
3328
3329void ImageCopyFromImage::submit (SubmitContext& context)
3330{
3331	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
3332	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
3333	const vk::VkImageCopy		region			=
3334	{
3335		{
3336			vk::VK_IMAGE_ASPECT_COLOR_BIT,
3337			0,	// mipLevel
3338			0,	// arrayLayer
3339			1	// layerCount
3340		},
3341		{ 0, 0, 0 },
3342
3343		{
3344			vk::VK_IMAGE_ASPECT_COLOR_BIT,
3345			0,	// mipLevel
3346			0,	// arrayLayer
3347			1	// layerCount
3348		},
3349		{ 0, 0, 0 },
3350		{
3351			(deUint32)m_imageWidth,
3352			(deUint32)m_imageHeight,
3353			1u
3354		}
3355	};
3356
3357	vkd.cmdCopyImage(commandBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, context.getImage(), m_imageLayout, 1, &region);
3358}
3359
3360void ImageCopyFromImage::verify (VerifyContext& context, size_t)
3361{
3362	ReferenceMemory&	reference	(context.getReference());
3363	de::Random			rng			(m_seed);
3364
3365	reference.setUndefined(0, (size_t)m_imageMemorySize);
3366
3367	{
3368		const PixelBufferAccess&	refAccess	(context.getReferenceImage().getAccess());
3369
3370		for (deInt32 y = 0; y < m_imageHeight; y++)
3371		for (deInt32 x = 0; x < m_imageWidth; x++)
3372		{
3373			const deUint8 r8 = rng.getUint8();
3374			const deUint8 g8 = rng.getUint8();
3375			const deUint8 b8 = rng.getUint8();
3376			const deUint8 a8 = rng.getUint8();
3377
3378			refAccess.setPixel(UVec4(r8, g8, b8, a8), x, y);
3379		}
3380	}
3381}
3382
// Command that copies the test image into a second image and verifies the
// second image's contents against the reference image.
class ImageCopyToImage : public CmdCommand
{
public:
									ImageCopyToImage	(vk::VkImageLayout imageLayout) : m_imageLayout(imageLayout) {}
									~ImageCopyToImage	(void) {}
	const char*						getName				(void) const { return "ImageCopyToImage"; }

	void							logPrepare			(TestLog& log, size_t commandIndex) const;
	void							prepare				(PrepareContext& context);
	void							logSubmit			(TestLog& log, size_t commandIndex) const;
	void							submit				(SubmitContext& context);
	void							verify				(VerifyContext& context, size_t commandIndex);

private:
	const vk::VkImageLayout			m_imageLayout;		// Layout of the source (test) image
	deInt32							m_imageWidth;
	deInt32							m_imageHeight;
	vk::VkDeviceSize				m_imageMemorySize;
	vk::Move<vk::VkImage>			m_dstImage;			// Destination image created in prepare()
	vk::Move<vk::VkDeviceMemory>	m_memory;			// Memory bound to m_dstImage
};
3404
3405void ImageCopyToImage::logPrepare (TestLog& log, size_t commandIndex) const
3406{
3407	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination image for image to image copy." << TestLog::EndMessage;
3408}
3409
void ImageCopyToImage::prepare (PrepareContext& context)
{
	// Create the destination image and transition it to
	// TRANSFER_DST_OPTIMAL so submit() can copy into it.
	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
	const vk::VkDevice				device			= context.getContext().getDevice();
	const vk::VkQueue				queue			= context.getContext().getQueue();
	const vk::VkCommandPool			commandPool		= context.getContext().getCommandPool();
	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();

	m_imageWidth		= context.getImageWidth();
	m_imageHeight		= context.getImageHeight();
	m_imageMemorySize	= context.getImageMemorySize();

	// Destination image: 2D RGBA8, optimal tiling, transfer source usage is
	// needed for the read-back in verify().
	{
		const vk::VkImageCreateInfo	createInfo =
		{
			vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
			DE_NULL,

			0,
			vk::VK_IMAGE_TYPE_2D,
			vk::VK_FORMAT_R8G8B8A8_UNORM,
			{
				(deUint32)m_imageWidth,
				(deUint32)m_imageHeight,
				1u,
			},
			1, 1, // mipLevels, arrayLayers
			vk::VK_SAMPLE_COUNT_1_BIT,

			vk::VK_IMAGE_TILING_OPTIMAL,
			vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
			vk::VK_SHARING_MODE_EXCLUSIVE,

			(deUint32)queueFamilies.size(),
			&queueFamilies[0],
			vk::VK_IMAGE_LAYOUT_UNDEFINED
		};

		m_dstImage = vk::createImage(vkd, device, &createInfo);
	}

	m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_dstImage, 0);

	// One-time layout transition UNDEFINED -> TRANSFER_DST_OPTIMAL.
	{
		const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
		const vk::VkImageMemoryBarrier			barrier			=
		{
			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
			DE_NULL,

			0,
			vk::VK_ACCESS_TRANSFER_WRITE_BIT,

			vk::VK_IMAGE_LAYOUT_UNDEFINED,
			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,

			vk::VK_QUEUE_FAMILY_IGNORED,
			vk::VK_QUEUE_FAMILY_IGNORED,

			*m_dstImage,
			{
				vk::VK_IMAGE_ASPECT_COLOR_BIT,
				0,	// Mip level
				1,	// Mip level count
				0,	// Layer
				1	// Layer count
			}
		};

		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);

		// Submit and wait so the command buffer can be freed on scope exit.
		VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
		queueRun(vkd, queue, *commandBuffer);
	}
}
3487
3488void ImageCopyToImage::logSubmit (TestLog& log, size_t commandIndex) const
3489{
3490	log << TestLog::Message << commandIndex << ":" << getName() << " Copy image to another image" << TestLog::EndMessage;
3491}
3492
3493void ImageCopyToImage::submit (SubmitContext& context)
3494{
3495	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
3496	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
3497	const vk::VkImageCopy		region			=
3498	{
3499		{
3500			vk::VK_IMAGE_ASPECT_COLOR_BIT,
3501			0,	// mipLevel
3502			0,	// arrayLayer
3503			1	// layerCount
3504		},
3505		{ 0, 0, 0 },
3506
3507		{
3508			vk::VK_IMAGE_ASPECT_COLOR_BIT,
3509			0,	// mipLevel
3510			0,	// arrayLayer
3511			1	// layerCount
3512		},
3513		{ 0, 0, 0 },
3514		{
3515			(deUint32)m_imageWidth,
3516			(deUint32)m_imageHeight,
3517			1u
3518		}
3519	};
3520
3521	vkd.cmdCopyImage(commandBuffer, context.getImage(), m_imageLayout, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
3522}
3523
void ImageCopyToImage::verify (VerifyContext& context, size_t commandIndex)
{
	// Read the destination image back into a host-visible buffer and compare
	// it pixel-by-pixel against the reference image.
	tcu::ResultCollector&					resultCollector	(context.getResultCollector());
	const vk::InstanceInterface&			vki				= context.getContext().getInstanceInterface();
	const vk::DeviceInterface&				vkd				= context.getContext().getDeviceInterface();
	const vk::VkPhysicalDevice				physicalDevice	= context.getContext().getPhysicalDevice();
	const vk::VkDevice						device			= context.getContext().getDevice();
	const vk::VkQueue						queue			= context.getContext().getQueue();
	const vk::VkCommandPool					commandPool		= context.getContext().getCommandPool();
	const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
	const vector<deUint32>&					queueFamilies	= context.getContext().getQueueFamilies();
	const vk::Unique<vk::VkBuffer>			dstBuffer		(createBuffer(vkd, device, 4 * m_imageWidth * m_imageHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
	const vk::Unique<vk::VkDeviceMemory>	memory			(bindBufferMemory(vki, vkd, physicalDevice, device, *dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
	{
		// TRANSFER_DST_OPTIMAL -> TRANSFER_SRC_OPTIMAL so the destination
		// image can be copied out.
		const vk::VkImageMemoryBarrier		imageBarrier	=
		{
			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
			DE_NULL,

			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
			vk::VK_ACCESS_TRANSFER_READ_BIT,

			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
			vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,

			vk::VK_QUEUE_FAMILY_IGNORED,
			vk::VK_QUEUE_FAMILY_IGNORED,

			*m_dstImage,
			{
				vk::VK_IMAGE_ASPECT_COLOR_BIT,
				0,	// Mip level
				1,	// Mip level count
				0,	// Layer
				1	// Layer count
			}
		};
		// Transfer write -> host read over the whole read-back buffer.
		const vk::VkBufferMemoryBarrier bufferBarrier =
		{
			vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
			DE_NULL,

			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
			vk::VK_ACCESS_HOST_READ_BIT,

			vk::VK_QUEUE_FAMILY_IGNORED,
			vk::VK_QUEUE_FAMILY_IGNORED,
			*dstBuffer,
			0,
			vk::VK_WHOLE_SIZE
		};
		// Whole-image, tightly packed copy region.
		const vk::VkBufferImageCopy	region =
		{
			0,
			0, 0,
			{
				vk::VK_IMAGE_ASPECT_COLOR_BIT,
				0,	// mipLevel
				0,	// arrayLayer
				1	// layerCount
			},
			{ 0, 0, 0 },
			{
				(deUint32)m_imageWidth,
				(deUint32)m_imageHeight,
				1u
			}
		};

		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrier);
		vkd.cmdCopyImageToBuffer(*commandBuffer, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *dstBuffer, 1, &region);
		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &bufferBarrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
	}

	VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
	queueRun(vkd, queue, *commandBuffer);

	// Map the read-back buffer, invalidate, and do an exact (zero-threshold)
	// comparison against the reference image.
	{
		void* const	ptr		= mapMemory(vkd, device, *memory, 4 * m_imageWidth * m_imageHeight);

		vk::invalidateMappedMemoryRange(vkd, device, *memory, 0,  4 * m_imageWidth * m_imageHeight);

		{
			const deUint8* const			data		= (const deUint8*)ptr;
			// Interpret the mapped buffer as a tightly packed RGBA8 image.
			const ConstPixelBufferAccess	resAccess	(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_imageWidth, m_imageHeight, 1, data);
			const ConstPixelBufferAccess&	refAccess	(context.getReferenceImage().getAccess());

			if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), refAccess, resAccess, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
				resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
		}

		vkd.unmapMemory(device, *memory);
	}
}
3618
// Scale factor applied by the blit commands below. With BLIT_SCALE_20 the
// source image is created at half the destination size (see
// ImageBlitFromImage::prepare), so the blit scales by 2x; with BLIT_SCALE_10
// source and destination have the same size.
enum BlitScale
{
	BLIT_SCALE_20,	// Source is half-size; blit upscales 2x
	BLIT_SCALE_10,	// Same size; 1:1 blit
};
3624
// Command that creates a source image filled with seeded pseudo-random data
// and blits it into the test image, optionally scaling (see BlitScale).
class ImageBlitFromImage : public CmdCommand
{
public:
									ImageBlitFromImage	(deUint32 seed, BlitScale scale, vk::VkImageLayout imageLayout) : m_seed(seed), m_scale(scale), m_imageLayout(imageLayout) {}
									~ImageBlitFromImage	(void) {}
	const char*						getName				(void) const { return "ImageBlitFromImage"; }

	void							logPrepare			(TestLog& log, size_t commandIndex) const;
	void							prepare				(PrepareContext& context);
	void							logSubmit			(TestLog& log, size_t commandIndex) const;
	void							submit				(SubmitContext& context);
	void							verify				(VerifyContext& context, size_t commandIndex);

private:
	const deUint32					m_seed;				// RNG seed for the source image contents
	const BlitScale					m_scale;			// Blit scale factor (determines source size)
	const vk::VkImageLayout			m_imageLayout;		// Layout of the destination (test) image
	deInt32							m_imageWidth;		// Destination (test image) dimensions
	deInt32							m_imageHeight;
	vk::VkDeviceSize				m_imageMemorySize;
	deInt32							m_srcImageWidth;	// Source dimensions derived from m_scale
	deInt32							m_srcImageHeight;
	vk::Move<vk::VkImage>			m_srcImage;			// Source image created in prepare()
	vk::Move<vk::VkDeviceMemory>	m_memory;			// Memory bound to m_srcImage
};
3650
3651void ImageBlitFromImage::logPrepare (TestLog& log, size_t commandIndex) const
3652{
3653	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source image for image to image blit." << TestLog::EndMessage;
3654}
3655
3656void ImageBlitFromImage::prepare (PrepareContext& context)
3657{
3658	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
3659	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
3660	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
3661	const vk::VkDevice				device			= context.getContext().getDevice();
3662	const vk::VkQueue				queue			= context.getContext().getQueue();
3663	const vk::VkCommandPool			commandPool		= context.getContext().getCommandPool();
3664	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();
3665
3666	m_imageWidth		= context.getImageWidth();
3667	m_imageHeight		= context.getImageHeight();
3668	m_imageMemorySize	= context.getImageMemorySize();
3669
3670	if (m_scale == BLIT_SCALE_10)
3671	{
3672		m_srcImageWidth			= m_imageWidth;
3673		m_srcImageHeight		= m_imageHeight;
3674	}
3675	else if (m_scale == BLIT_SCALE_20)
3676	{
3677		m_srcImageWidth			= m_imageWidth / 2;
3678		m_srcImageHeight		= m_imageHeight / 2;
3679	}
3680	else
3681		DE_FATAL("Unsupported scale");
3682
3683	{
3684		const vk::VkImageCreateInfo	createInfo =
3685		{
3686			vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
3687			DE_NULL,
3688
3689			0,
3690			vk::VK_IMAGE_TYPE_2D,
3691			vk::VK_FORMAT_R8G8B8A8_UNORM,
3692			{
3693				(deUint32)m_srcImageWidth,
3694				(deUint32)m_srcImageHeight,
3695				1u,
3696			},
3697			1, 1, // mipLevels, arrayLayers
3698			vk::VK_SAMPLE_COUNT_1_BIT,
3699
3700			vk::VK_IMAGE_TILING_OPTIMAL,
3701			vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
3702			vk::VK_SHARING_MODE_EXCLUSIVE,
3703
3704			(deUint32)queueFamilies.size(),
3705			&queueFamilies[0],
3706			vk::VK_IMAGE_LAYOUT_UNDEFINED
3707		};
3708
3709		m_srcImage = vk::createImage(vkd, device, &createInfo);
3710	}
3711
3712	m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_srcImage, 0);
3713
3714	{
3715		const vk::Unique<vk::VkBuffer>			srcBuffer		(createBuffer(vkd, device, 4 * m_srcImageWidth * m_srcImageHeight, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
3716		const vk::Unique<vk::VkDeviceMemory>	memory			(bindBufferMemory(vki, vkd, physicalDevice, device, *srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
3717		const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
3718		const vk::VkImageMemoryBarrier			preImageBarrier	=
3719		{
3720			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3721			DE_NULL,
3722
3723			0,
3724			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3725
3726			vk::VK_IMAGE_LAYOUT_UNDEFINED,
3727			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3728
3729			vk::VK_QUEUE_FAMILY_IGNORED,
3730			vk::VK_QUEUE_FAMILY_IGNORED,
3731
3732			*m_srcImage,
3733			{
3734				vk::VK_IMAGE_ASPECT_COLOR_BIT,
3735				0,	// Mip level
3736				1,	// Mip level count
3737				0,	// Layer
3738				1	// Layer count
3739			}
3740		};
3741		const vk::VkImageMemoryBarrier			postImageBarrier =
3742		{
3743			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3744			DE_NULL,
3745
3746			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3747			0,
3748
3749			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3750			vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
3751
3752			vk::VK_QUEUE_FAMILY_IGNORED,
3753			vk::VK_QUEUE_FAMILY_IGNORED,
3754
3755			*m_srcImage,
3756			{
3757				vk::VK_IMAGE_ASPECT_COLOR_BIT,
3758				0,	// Mip level
3759				1,	// Mip level count
3760				0,	// Layer
3761				1	// Layer count
3762			}
3763		};
3764		const vk::VkBufferImageCopy				region				=
3765		{
3766			0,
3767			0, 0,
3768			{
3769				vk::VK_IMAGE_ASPECT_COLOR_BIT,
3770				0,	// mipLevel
3771				0,	// arrayLayer
3772				1	// layerCount
3773			},
3774			{ 0, 0, 0 },
3775			{
3776				(deUint32)m_srcImageWidth,
3777				(deUint32)m_srcImageHeight,
3778				1u
3779			}
3780		};
3781
3782		{
3783			void* const	ptr	= mapMemory(vkd, device, *memory, 4 * m_srcImageWidth * m_srcImageHeight);
3784			de::Random	rng	(m_seed);
3785
3786			{
3787				deUint8* const	data = (deUint8*)ptr;
3788
3789				for (size_t ndx = 0; ndx < (size_t)(4 * m_srcImageWidth * m_srcImageHeight); ndx++)
3790					data[ndx] = rng.getUint8();
3791			}
3792
3793			vk::flushMappedMemoryRange(vkd, device, *memory, 0, 4 * m_srcImageWidth * m_srcImageHeight);
3794			vkd.unmapMemory(device, *memory);
3795		}
3796
3797		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &preImageBarrier);
3798		vkd.cmdCopyBufferToImage(*commandBuffer, *srcBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
3799		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &postImageBarrier);
3800
3801		VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
3802		queueRun(vkd, queue, *commandBuffer);
3803	}
3804}
3805
3806void ImageBlitFromImage::logSubmit (TestLog& log, size_t commandIndex) const
3807{
3808	log << TestLog::Message << commandIndex << ":" << getName() << " Blit from another image" << (m_scale == BLIT_SCALE_20 ? " scale 2x" : "")  << TestLog::EndMessage;
3809}
3810
void ImageBlitFromImage::submit (SubmitContext& context)
{
	// Record a blit from the prepared source image into the test image.
	// For BLIT_SCALE_10 the extents match (1:1 copy); for BLIT_SCALE_20 the
	// source was created at half size in prepare(), so this up-scales by 2x
	// using nearest filtering.
	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
	const vk::VkImageBlit		region			=
	{
		// Src
		{
			vk::VK_IMAGE_ASPECT_COLOR_BIT,
			0,	// mipLevel
			0,	// arrayLayer
			1	// layerCount
		},
		{
			{ 0, 0, 0 },
			{
				m_srcImageWidth,
				m_srcImageHeight,
				1
			},
		},

		// Dst
		{
			vk::VK_IMAGE_ASPECT_COLOR_BIT,
			0,	// mipLevel
			0,	// arrayLayer
			1	// layerCount
		},
		{
			{ 0, 0, 0 },
			{
				m_imageWidth,
				m_imageHeight,
				1u
			}
		}
	};
	// The source image was transitioned to TRANSFER_SRC_OPTIMAL at the end of
	// prepare(); the destination uses the layout chosen for this command run.
	vkd.cmdBlitImage(commandBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, context.getImage(), m_imageLayout, 1, &region, vk::VK_FILTER_NEAREST);
}
3851
void ImageBlitFromImage::verify (VerifyContext& context, size_t)
{
	// Update the reference state to match the blit recorded in submit().
	ReferenceMemory&	reference	(context.getReference());
	de::Random			rng			(m_seed);

	// The blit rewrites the whole image, so the raw contents of the backing
	// memory are no longer predictable; mark the full range undefined.
	reference.setUndefined(0, (size_t)m_imageMemorySize);

	{
		const PixelBufferAccess&	refAccess	(context.getReferenceImage().getAccess());

		if (m_scale == BLIT_SCALE_10)
		{
			// 1:1 blit: regenerate the same random byte stream that prepare()
			// uploaded to the source image (same seed, same consumption order:
			// R, G, B, A per pixel, row-major) straight into the reference.
			for (deInt32 y = 0; y < m_imageHeight; y++)
			for (deInt32 x = 0; x < m_imageWidth; x++)
			{
				const deUint8 r8 = rng.getUint8();
				const deUint8 g8 = rng.getUint8();
				const deUint8 b8 = rng.getUint8();
				const deUint8 a8 = rng.getUint8();

				refAccess.setPixel(UVec4(r8, g8, b8, a8), x, y);
			}
		}
		else if (m_scale == BLIT_SCALE_20)
		{
			// 2x up-scaling blit: first rebuild the half-size source content,
			// then sample it into the full-size reference.
			tcu::TextureLevel	source	(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_srcImageWidth, m_srcImageHeight);
			const float			xscale	= ((float)m_srcImageWidth)  / (float)m_imageWidth;
			const float			yscale	= ((float)m_srcImageHeight) / (float)m_imageHeight;

			for (deInt32 y = 0; y < m_srcImageHeight; y++)
			for (deInt32 x = 0; x < m_srcImageWidth; x++)
			{
				const deUint8 r8 = rng.getUint8();
				const deUint8 g8 = rng.getUint8();
				const deUint8 b8 = rng.getUint8();
				const deUint8 a8 = rng.getUint8();

				source.getAccess().setPixel(UVec4(r8, g8, b8, a8), x, y);
			}

			// Point sampling mirrors VK_FILTER_NEAREST used by the blit.
			for (deInt32 y = 0; y < m_imageHeight; y++)
			for (deInt32 x = 0; x < m_imageWidth; x++)
				refAccess.setPixel(source.getAccess().getPixelUint(int(float(x) * xscale), int(float(y) * yscale)), x, y);
		}
		else
			DE_FATAL("Unsupported scale");
	}
}
3900
// Command that blits the test image into a freshly allocated destination
// image (1:1 or 2x up-scale) and verifies the blit result by reading the
// destination back and comparing against the reference image.
class ImageBlitToImage : public CmdCommand
{
public:
									ImageBlitToImage	(BlitScale scale, vk::VkImageLayout imageLayout) : m_scale(scale), m_imageLayout(imageLayout) {}
									~ImageBlitToImage	(void) {}
	const char*						getName				(void) const { return "ImageBlitToImage"; }

	void							logPrepare			(TestLog& log, size_t commandIndex) const;
	void							prepare				(PrepareContext& context);
	void							logSubmit			(TestLog& log, size_t commandIndex) const;
	void							submit				(SubmitContext& context);
	void							verify				(VerifyContext& context, size_t commandIndex);

private:
	const BlitScale					m_scale;			// BLIT_SCALE_10 (1:1) or BLIT_SCALE_20 (2x)
	const vk::VkImageLayout			m_imageLayout;		// layout of the source (test) image during the blit
	deInt32							m_imageWidth;		// test image dimensions; set in prepare()
	deInt32							m_imageHeight;
	vk::VkDeviceSize				m_imageMemorySize;
	deInt32							m_dstImageWidth;	// destination dimensions (scaled by m_scale)
	deInt32							m_dstImageHeight;
	vk::Move<vk::VkImage>			m_dstImage;			// blit destination created in prepare()
	vk::Move<vk::VkDeviceMemory>	m_memory;			// backing memory of m_dstImage
};
3925
3926void ImageBlitToImage::logPrepare (TestLog& log, size_t commandIndex) const
3927{
3928	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination image for image to image blit." << TestLog::EndMessage;
3929}
3930
3931void ImageBlitToImage::prepare (PrepareContext& context)
3932{
3933	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
3934	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
3935	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
3936	const vk::VkDevice				device			= context.getContext().getDevice();
3937	const vk::VkQueue				queue			= context.getContext().getQueue();
3938	const vk::VkCommandPool			commandPool		= context.getContext().getCommandPool();
3939	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();
3940
3941	m_imageWidth		= context.getImageWidth();
3942	m_imageHeight		= context.getImageHeight();
3943	m_imageMemorySize	= context.getImageMemorySize();
3944
3945	if (m_scale == BLIT_SCALE_10)
3946	{
3947		m_dstImageWidth		= context.getImageWidth();
3948		m_dstImageHeight	= context.getImageHeight();
3949	}
3950	else if (m_scale == BLIT_SCALE_20)
3951	{
3952		m_dstImageWidth		= context.getImageWidth() * 2;
3953		m_dstImageHeight	= context.getImageHeight() * 2;
3954	}
3955	else
3956		DE_FATAL("Unsupportd blit scale");
3957
3958	{
3959		const vk::VkImageCreateInfo	createInfo =
3960		{
3961			vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
3962			DE_NULL,
3963
3964			0,
3965			vk::VK_IMAGE_TYPE_2D,
3966			vk::VK_FORMAT_R8G8B8A8_UNORM,
3967			{
3968				(deUint32)m_dstImageWidth,
3969				(deUint32)m_dstImageHeight,
3970				1u,
3971			},
3972			1, 1, // mipLevels, arrayLayers
3973			vk::VK_SAMPLE_COUNT_1_BIT,
3974
3975			vk::VK_IMAGE_TILING_OPTIMAL,
3976			vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
3977			vk::VK_SHARING_MODE_EXCLUSIVE,
3978
3979			(deUint32)queueFamilies.size(),
3980			&queueFamilies[0],
3981			vk::VK_IMAGE_LAYOUT_UNDEFINED
3982		};
3983
3984		m_dstImage = vk::createImage(vkd, device, &createInfo);
3985	}
3986
3987	m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_dstImage, 0);
3988
3989	{
3990		const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
3991		const vk::VkImageMemoryBarrier			barrier			=
3992		{
3993			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3994			DE_NULL,
3995
3996			0,
3997			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3998
3999			vk::VK_IMAGE_LAYOUT_UNDEFINED,
4000			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
4001
4002			vk::VK_QUEUE_FAMILY_IGNORED,
4003			vk::VK_QUEUE_FAMILY_IGNORED,
4004
4005			*m_dstImage,
4006			{
4007				vk::VK_IMAGE_ASPECT_COLOR_BIT,
4008				0,	// Mip level
4009				1,	// Mip level count
4010				0,	// Layer
4011				1	// Layer count
4012			}
4013		};
4014
4015		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);
4016
4017		VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
4018		queueRun(vkd, queue, *commandBuffer);
4019	}
4020}
4021
4022void ImageBlitToImage::logSubmit (TestLog& log, size_t commandIndex) const
4023{
4024	log << TestLog::Message << commandIndex << ":" << getName() << " Blit image to another image" << (m_scale == BLIT_SCALE_20 ? " scale 2x" : "")  << TestLog::EndMessage;
4025}
4026
void ImageBlitToImage::submit (SubmitContext& context)
{
	// Record a blit from the test image into m_dstImage (created in prepare()
	// and already transitioned to TRANSFER_DST_OPTIMAL there). For
	// BLIT_SCALE_20 the destination is double-sized, so this up-scales 2x
	// with nearest filtering.
	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
	const vk::VkImageBlit		region			=
	{
		// Src
		{
			vk::VK_IMAGE_ASPECT_COLOR_BIT,
			0,	// mipLevel
			0,	// arrayLayer
			1	// layerCount
		},
		{
			{ 0, 0, 0 },
			{
				m_imageWidth,
				m_imageHeight,
				1
			},
		},

		// Dst
		{
			vk::VK_IMAGE_ASPECT_COLOR_BIT,
			0,	// mipLevel
			0,	// arrayLayer
			1	// layerCount
		},
		{
			{ 0, 0, 0 },
			{
				m_dstImageWidth,
				m_dstImageHeight,
				1u
			}
		}
	};
	vkd.cmdBlitImage(commandBuffer, context.getImage(), m_imageLayout, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region, vk::VK_FILTER_NEAREST);
}
4067
void ImageBlitToImage::verify (VerifyContext& context, size_t commandIndex)
{
	// Read the blit destination back into a host-visible buffer and compare
	// it against the reference image (expanded 2x for the up-scaling case).
	tcu::ResultCollector&					resultCollector	(context.getResultCollector());
	const vk::InstanceInterface&			vki				= context.getContext().getInstanceInterface();
	const vk::DeviceInterface&				vkd				= context.getContext().getDeviceInterface();
	const vk::VkPhysicalDevice				physicalDevice	= context.getContext().getPhysicalDevice();
	const vk::VkDevice						device			= context.getContext().getDevice();
	const vk::VkQueue						queue			= context.getContext().getQueue();
	const vk::VkCommandPool					commandPool		= context.getContext().getCommandPool();
	const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
	const vector<deUint32>&					queueFamilies	= context.getContext().getQueueFamilies();
	const vk::Unique<vk::VkBuffer>			dstBuffer		(createBuffer(vkd, device, 4 * m_dstImageWidth * m_dstImageHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
	const vk::Unique<vk::VkDeviceMemory>	memory			(bindBufferMemory(vki, vkd, physicalDevice, device, *dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
	{
		// Transition m_dstImage for reading, copy it to dstBuffer and make the
		// buffer writes visible to the host.
		const vk::VkImageMemoryBarrier		imageBarrier	=
		{
			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
			DE_NULL,

			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
			vk::VK_ACCESS_TRANSFER_READ_BIT,

			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
			vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,

			vk::VK_QUEUE_FAMILY_IGNORED,
			vk::VK_QUEUE_FAMILY_IGNORED,

			*m_dstImage,
			{
				vk::VK_IMAGE_ASPECT_COLOR_BIT,
				0,	// Mip level
				1,	// Mip level count
				0,	// Layer
				1	// Layer count
			}
		};
		const vk::VkBufferMemoryBarrier bufferBarrier =
		{
			vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
			DE_NULL,

			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
			vk::VK_ACCESS_HOST_READ_BIT,

			vk::VK_QUEUE_FAMILY_IGNORED,
			vk::VK_QUEUE_FAMILY_IGNORED,
			*dstBuffer,
			0,
			vk::VK_WHOLE_SIZE
		};
		const vk::VkBufferImageCopy	region =
		{
			0,
			0, 0,
			{
				vk::VK_IMAGE_ASPECT_COLOR_BIT,
				0,	// mipLevel
				0,	// arrayLayer
				1	// layerCount
			},
			{ 0, 0, 0 },
			{
				(deUint32)m_dstImageWidth,
				(deUint32)m_dstImageHeight,
				1
			}
		};

		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrier);
		vkd.cmdCopyImageToBuffer(*commandBuffer, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *dstBuffer, 1, &region);
		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &bufferBarrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
	}

	VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
	queueRun(vkd, queue, *commandBuffer);

	{
		void* const	ptr		= mapMemory(vkd, device, *memory, 4 * m_dstImageWidth * m_dstImageHeight);

		vk::invalidateMappedMemoryRange(vkd, device, *memory, 0,  4 * m_dstImageWidth * m_dstImageHeight);

		if (m_scale == BLIT_SCALE_10)
		{
			// 1:1 blit: the destination must match the reference image exactly.
			const deUint8* const			data		= (const deUint8*)ptr;
			const ConstPixelBufferAccess	resAccess	(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_dstImageWidth, m_dstImageHeight, 1, data);
			const ConstPixelBufferAccess&	refAccess	(context.getReferenceImage().getAccess());

			if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), refAccess, resAccess, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
				resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
		}
		else if (m_scale == BLIT_SCALE_20)
		{
			// 2x blit: build the expected image by replicating each reference
			// pixel into a 2x2 block (nearest filtering), then compare.
			const deUint8* const			data		= (const deUint8*)ptr;
			const ConstPixelBufferAccess	resAccess	(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_dstImageWidth, m_dstImageHeight, 1, data);
			tcu::TextureLevel				reference	(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_dstImageWidth, m_dstImageHeight, 1);

			{
				const ConstPixelBufferAccess&	refAccess	(context.getReferenceImage().getAccess());

				for (deInt32 y = 0; y < m_dstImageHeight; y++)
				for (deInt32 x = 0; x < m_dstImageWidth; x++)
				{
					reference.getAccess().setPixel(refAccess.getPixel(x/2, y/2), x, y);
				}
			}

			if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), reference.getAccess(), resAccess, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
				resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
		}
		else
			DE_FATAL("Unknown scale");

		vkd.unmapMemory(device, *memory);
	}
}
4184
// Wrapper around PrepareContext that additionally exposes the render pass and
// render-target dimensions to RenderPassCommand::prepare() implementations.
class PrepareRenderPassContext
{
public:
								PrepareRenderPassContext	(PrepareContext&	context,
															 vk::VkRenderPass	renderPass,
															 vk::VkFramebuffer	framebuffer,
															 deInt32			targetWidth,
															 deInt32			targetHeight)
		: m_context			(context)
		, m_renderPass		(renderPass)
		, m_framebuffer		(framebuffer)
		, m_targetWidth		(targetWidth)
		, m_targetHeight	(targetHeight)
	{
	}

	// Forwarders to the wrapped PrepareContext.
	const Memory&									getMemory					(void) const { return m_context.getMemory(); }
	const Context&									getContext					(void) const { return m_context.getContext(); }
	const vk::ProgramCollection<vk::ProgramBinary>&	getBinaryCollection			(void) const { return m_context.getBinaryCollection(); }

	vk::VkBuffer				getBuffer					(void) const { return m_context.getBuffer(); }
	vk::VkDeviceSize			getBufferSize				(void) const { return m_context.getBufferSize(); }

	vk::VkImage					getImage					(void) const { return m_context.getImage(); }
	deInt32						getImageWidth				(void) const { return m_context.getImageWidth(); }
	deInt32						getImageHeight				(void) const { return m_context.getImageHeight(); }
	vk::VkImageLayout			getImageLayout				(void) const { return m_context.getImageLayout(); }

	deInt32						getTargetWidth				(void) const { return m_targetWidth; }
	deInt32						getTargetHeight				(void) const { return m_targetHeight; }

	vk::VkRenderPass			getRenderPass				(void) const { return m_renderPass; }

private:
	PrepareContext&				m_context;
	const vk::VkRenderPass		m_renderPass;
	// NOTE(review): m_framebuffer is stored but has no accessor and is not
	// read by any member function here -- confirm whether it is needed.
	const vk::VkFramebuffer		m_framebuffer;
	const deInt32				m_targetWidth;
	const deInt32				m_targetHeight;
};
4225
// Wrapper around VerifyContext that additionally owns an RGBA8 reference
// render target, which RenderPassCommand::verify() implementations update to
// mirror what their draw calls should have produced.
class VerifyRenderPassContext
{
public:
							VerifyRenderPassContext		(VerifyContext&			context,
														 deInt32				targetWidth,
														 deInt32				targetHeight)
		: m_context			(context)
		, m_referenceTarget	(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), targetWidth, targetHeight)
	{
	}

	// Forwarders to the wrapped VerifyContext.
	const Context&			getContext			(void) const { return m_context.getContext(); }
	TestLog&				getLog				(void) const { return m_context.getLog(); }
	tcu::ResultCollector&	getResultCollector	(void) const { return m_context.getResultCollector(); }

	// Reference image for the render pass color target.
	TextureLevel&			getReferenceTarget	(void) { return m_referenceTarget; }

	ReferenceMemory&		getReference		(void) { return m_context.getReference(); }
	TextureLevel&			getReferenceImage	(void) { return m_context.getReferenceImage();}

private:
	VerifyContext&	m_context;
	TextureLevel	m_referenceTarget;
};
4250
4251
// Base class for commands that are recorded inside a render pass instance;
// SubmitRenderPass calls submit() between cmdBeginRenderPass and
// cmdEndRenderPass. All hooks default to no-ops.
class RenderPassCommand
{
public:
	virtual				~RenderPassCommand	(void) {}
	virtual const char*	getName				(void) const = 0;

	// Log things that are done during prepare
	virtual void		logPrepare			(TestLog&, size_t) const {}
	// Log submitted calls etc.
	virtual void		logSubmit			(TestLog&, size_t) const {}

	// Allocate vulkan resources and prepare for submit.
	virtual void		prepare				(PrepareRenderPassContext&) {}

	// Submit commands to command buffer.
	virtual void		submit				(SubmitContext&) {}

	// Verify results
	virtual void		verify				(VerifyRenderPassContext&, size_t) {}
};
4272
// Top-level command that records a complete render pass: creates a 256x256
// color target, runs the contained RenderPassCommands inside the pass, and
// verifies the rendered output by reading the color target back.
class SubmitRenderPass : public CmdCommand
{
public:
				SubmitRenderPass	(const vector<RenderPassCommand*>& commands);
				~SubmitRenderPass	(void);
	const char*	getName				(void) const { return "SubmitRenderPass"; }

	void		logPrepare			(TestLog&, size_t) const;
	void		logSubmit			(TestLog&, size_t) const;

	void		prepare				(PrepareContext&);
	void		submit				(SubmitContext&);

	void		verify				(VerifyContext&, size_t);

private:
	const deInt32					m_targetWidth;
	const deInt32					m_targetHeight;
	vk::Move<vk::VkRenderPass>		m_renderPass;
	vk::Move<vk::VkDeviceMemory>	m_colorTargetMemory;
	// NOTE(review): m_colorTargetMemory2 is not referenced by any member
	// function in this file -- looks like a removal candidate; confirm
	// against the rest of the file before deleting.
	de::MovePtr<vk::Allocation>		m_colorTargetMemory2;
	vk::Move<vk::VkImage>			m_colorTarget;
	vk::Move<vk::VkImageView>		m_colorTargetView;
	vk::Move<vk::VkFramebuffer>		m_framebuffer;
	vector<RenderPassCommand*>		m_commands;	// owned; deleted in the destructor
};
4299
// Takes ownership of the given commands; they are deleted in the destructor.
// Render target size is fixed at 256x256.
SubmitRenderPass::SubmitRenderPass (const vector<RenderPassCommand*>& commands)
	: m_targetWidth		(256)
	, m_targetHeight	(256)
	, m_commands		(commands)
{
}
4306
4307SubmitRenderPass::~SubmitRenderPass()
4308{
4309	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4310		delete m_commands[cmdNdx];
4311}
4312
4313void SubmitRenderPass::logPrepare (TestLog& log, size_t commandIndex) const
4314{
4315	const string				sectionName	(de::toString(commandIndex) + ":" + getName());
4316	const tcu::ScopedLogSection	section		(log, sectionName, sectionName);
4317
4318	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4319	{
4320		RenderPassCommand& command = *m_commands[cmdNdx];
4321		command.logPrepare(log, cmdNdx);
4322	}
4323}
4324
4325void SubmitRenderPass::logSubmit (TestLog& log, size_t commandIndex) const
4326{
4327	const string				sectionName	(de::toString(commandIndex) + ":" + getName());
4328	const tcu::ScopedLogSection	section		(log, sectionName, sectionName);
4329
4330	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4331	{
4332		RenderPassCommand& command = *m_commands[cmdNdx];
4333		command.logSubmit(log, cmdNdx);
4334	}
4335}
4336
void SubmitRenderPass::prepare (PrepareContext& context)
{
	// Create everything needed to record a render pass instance: the color
	// target image with bound memory, an image view, a single-subpass render
	// pass and a framebuffer. Finally let each contained render-pass command
	// allocate its own resources.
	const vk::InstanceInterface&			vki				= context.getContext().getInstanceInterface();
	const vk::DeviceInterface&				vkd				= context.getContext().getDeviceInterface();
	const vk::VkPhysicalDevice				physicalDevice	= context.getContext().getPhysicalDevice();
	const vk::VkDevice						device			= context.getContext().getDevice();
	const vector<deUint32>&					queueFamilies	= context.getContext().getQueueFamilies();

	const vk::VkAttachmentReference	colorAttachments[]	=
	{
		{ 0, vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL }
	};
	const vk::VkSubpassDescription	subpass				=
	{
		0u,
		vk::VK_PIPELINE_BIND_POINT_GRAPHICS,

		0u,
		DE_NULL,

		DE_LENGTH_OF_ARRAY(colorAttachments),
		colorAttachments,
		DE_NULL,
		DE_NULL,
		0u,
		DE_NULL
	};
	// Single color attachment: cleared on load, stored, and declared to end
	// the pass in TRANSFER_SRC_OPTIMAL (verify() copies the target out).
	const vk::VkAttachmentDescription attachment =
	{
		0u,
		vk::VK_FORMAT_R8G8B8A8_UNORM,
		vk::VK_SAMPLE_COUNT_1_BIT,

		vk::VK_ATTACHMENT_LOAD_OP_CLEAR,
		vk::VK_ATTACHMENT_STORE_OP_STORE,

		vk::VK_ATTACHMENT_LOAD_OP_DONT_CARE,
		vk::VK_ATTACHMENT_STORE_OP_DONT_CARE,

		vk::VK_IMAGE_LAYOUT_UNDEFINED,
		vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL
	};
	{
		// Color target image: rendered to during the pass, read back in verify().
		const vk::VkImageCreateInfo createInfo =
		{
			vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
			DE_NULL,
			0u,

			vk::VK_IMAGE_TYPE_2D,
			vk::VK_FORMAT_R8G8B8A8_UNORM,
			{ (deUint32)m_targetWidth, (deUint32)m_targetHeight, 1u },
			1u,
			1u,
			vk::VK_SAMPLE_COUNT_1_BIT,
			vk::VK_IMAGE_TILING_OPTIMAL,
			vk::VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
			vk::VK_SHARING_MODE_EXCLUSIVE,
			(deUint32)queueFamilies.size(),
			&queueFamilies[0],
			vk::VK_IMAGE_LAYOUT_UNDEFINED
		};

		m_colorTarget = vk::createImage(vkd, device, &createInfo);
	}

	m_colorTargetMemory = bindImageMemory(vki, vkd, physicalDevice, device, *m_colorTarget, 0);

	{
		// Identity-swizzled 2D view over the color target.
		const vk::VkImageViewCreateInfo createInfo =
		{
			vk::VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
			DE_NULL,

			0u,
			*m_colorTarget,
			vk::VK_IMAGE_VIEW_TYPE_2D,
			vk::VK_FORMAT_R8G8B8A8_UNORM,
			{
				vk::VK_COMPONENT_SWIZZLE_R,
				vk::VK_COMPONENT_SWIZZLE_G,
				vk::VK_COMPONENT_SWIZZLE_B,
				vk::VK_COMPONENT_SWIZZLE_A
			},
			{
				vk::VK_IMAGE_ASPECT_COLOR_BIT,
				0u,
				1u,
				0u,
				1u
			}
		};

		m_colorTargetView = vk::createImageView(vkd, device, &createInfo);
	}
	{
		// Render pass with the single subpass/attachment defined above.
		const vk::VkRenderPassCreateInfo createInfo =
		{
			vk::VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
			DE_NULL,
			0u,

			1u,
			&attachment,

			1u,
			&subpass,

			0,
			DE_NULL
		};

		m_renderPass = vk::createRenderPass(vkd, device, &createInfo);
	}

	{
		// Framebuffer binding the color target view to the render pass.
		const vk::VkImageView				imageViews[]	=
		{
			*m_colorTargetView
		};
		const vk::VkFramebufferCreateInfo	createInfo		=
		{
			vk::VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
			DE_NULL,
			0u,

			*m_renderPass,
			DE_LENGTH_OF_ARRAY(imageViews),
			imageViews,
			(deUint32)m_targetWidth,
			(deUint32)m_targetHeight,
			1u
		};

		m_framebuffer = vk::createFramebuffer(vkd, device, &createInfo);
	}

	{
		// Let each contained command build its own pipelines/resources against
		// this render pass.
		PrepareRenderPassContext renderpassContext (context, *m_renderPass, *m_framebuffer, m_targetWidth, m_targetHeight);

		for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
		{
			RenderPassCommand& command = *m_commands[cmdNdx];
			command.prepare(renderpassContext);
		}
	}
}
4484
4485void SubmitRenderPass::submit (SubmitContext& context)
4486{
4487	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
4488	const vk::VkCommandBuffer		commandBuffer	= context.getCommandBuffer();
4489	const vk::VkClearValue			clearValue		= vk::makeClearValueColorF32(0.0f, 0.0f, 0.0f, 1.0f);
4490
4491	const vk::VkRenderPassBeginInfo	beginInfo		=
4492	{
4493		vk::VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
4494		DE_NULL,
4495
4496		*m_renderPass,
4497		*m_framebuffer,
4498
4499		{ { 0, 0 },  { (deUint32)m_targetWidth, (deUint32)m_targetHeight } },
4500		1u,
4501		&clearValue
4502	};
4503
4504	vkd.cmdBeginRenderPass(commandBuffer, &beginInfo, vk::VK_SUBPASS_CONTENTS_INLINE);
4505
4506	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4507	{
4508		RenderPassCommand& command = *m_commands[cmdNdx];
4509
4510		command.submit(context);
4511	}
4512
4513	vkd.cmdEndRenderPass(commandBuffer);
4514}
4515
void SubmitRenderPass::verify (VerifyContext& context, size_t commandIndex)
{
	// Build the expected render target (cleared to opaque black, then updated
	// by each command's verify()), read the real color target back and compare
	// the two with a zero-tolerance threshold.
	TestLog&					log				(context.getLog());
	tcu::ResultCollector&		resultCollector	(context.getResultCollector());
	const string				sectionName		(de::toString(commandIndex) + ":" + getName());
	const tcu::ScopedLogSection	section			(log, sectionName, sectionName);
	VerifyRenderPassContext		verifyContext	(context, m_targetWidth, m_targetHeight);

	// Must match the clear color recorded in submit().
	tcu::clear(verifyContext.getReferenceTarget().getAccess(), Vec4(0.0f, 0.0f, 0.0f, 1.0f));

	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
	{
		RenderPassCommand& command = *m_commands[cmdNdx];
		command.verify(verifyContext, cmdNdx);
	}

	{
		const vk::InstanceInterface&			vki				= context.getContext().getInstanceInterface();
		const vk::DeviceInterface&				vkd				= context.getContext().getDeviceInterface();
		const vk::VkPhysicalDevice				physicalDevice	= context.getContext().getPhysicalDevice();
		const vk::VkDevice						device			= context.getContext().getDevice();
		const vk::VkQueue						queue			= context.getContext().getQueue();
		const vk::VkCommandPool					commandPool		= context.getContext().getCommandPool();
		const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
		const vector<deUint32>&					queueFamilies	= context.getContext().getQueueFamilies();
		const vk::Unique<vk::VkBuffer>			dstBuffer		(createBuffer(vkd, device, 4 * m_targetWidth * m_targetHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
		const vk::Unique<vk::VkDeviceMemory>	memory			(bindBufferMemory(vki, vkd, physicalDevice, device, *dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
		{
			// Copy the rendered color target into a host-visible buffer, then
			// make the writes visible to the host.
			// NOTE(review): the render pass declares finalLayout
			// TRANSFER_SRC_OPTIMAL for the attachment, but this barrier uses
			// oldLayout COLOR_ATTACHMENT_OPTIMAL -- confirm the intended
			// layout here.
			const vk::VkImageMemoryBarrier		imageBarrier	=
			{
				vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
				DE_NULL,

				vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
				vk::VK_ACCESS_TRANSFER_READ_BIT,

				vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
				vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,

				vk::VK_QUEUE_FAMILY_IGNORED,
				vk::VK_QUEUE_FAMILY_IGNORED,

				*m_colorTarget,
				{
					vk::VK_IMAGE_ASPECT_COLOR_BIT,
					0,	// Mip level
					1,	// Mip level count
					0,	// Layer
					1	// Layer count
				}
			};
			const vk::VkBufferMemoryBarrier bufferBarrier =
			{
				vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
				DE_NULL,

				vk::VK_ACCESS_TRANSFER_WRITE_BIT,
				vk::VK_ACCESS_HOST_READ_BIT,

				vk::VK_QUEUE_FAMILY_IGNORED,
				vk::VK_QUEUE_FAMILY_IGNORED,
				*dstBuffer,
				0,
				vk::VK_WHOLE_SIZE
			};
			const vk::VkBufferImageCopy	region =
			{
				0,
				0, 0,
				{
					vk::VK_IMAGE_ASPECT_COLOR_BIT,
					0,	// mipLevel
					0,	// arrayLayer
					1	// layerCount
				},
				{ 0, 0, 0 },
				{
					(deUint32)m_targetWidth,
					(deUint32)m_targetHeight,
					1u
				}
			};

			vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrier);
			vkd.cmdCopyImageToBuffer(*commandBuffer, *m_colorTarget, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *dstBuffer, 1, &region);
			vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &bufferBarrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
		}

		VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
		queueRun(vkd, queue, *commandBuffer);

		{
			void* const	ptr		= mapMemory(vkd, device, *memory, 4 * m_targetWidth * m_targetHeight);

			vk::invalidateMappedMemoryRange(vkd, device, *memory, 0,  4 * m_targetWidth * m_targetHeight);

			{
				// Exact comparison (threshold 0) against the reference target.
				const deUint8* const			data		= (const deUint8*)ptr;
				const ConstPixelBufferAccess	resAccess	(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_targetWidth, m_targetHeight, 1, data);
				const ConstPixelBufferAccess&	refAccess	(verifyContext.getReferenceTarget().getAccess());

				if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), refAccess, resAccess, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
					resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
			}

			vkd.unmapMemory(device, *memory);
		}
	}
}
4625
// Render-pass command that draws using the test buffer either as a vertex
// buffer or as an index buffer, selecting the matching vertex shader from the
// binary collection.
class RenderBuffer : public RenderPassCommand
{
public:
	enum RenderAs
	{
		RENDERAS_VERTEX_BUFFER,
		RENDERAS_INDEX_BUFFER,
	};
				RenderBuffer		(RenderAs renderAs) : m_renderAs(renderAs) {}
				~RenderBuffer		(void) {}

	const char*	getName				(void) const { return "RenderBuffer"; }
	void		logPrepare			(TestLog&, size_t) const;
	void		logSubmit			(TestLog&, size_t) const;
	void		prepare				(PrepareRenderPassContext&);
	void		submit				(SubmitContext& context);
	void		verify				(VerifyRenderPassContext&, size_t);

private:
	const RenderAs						m_renderAs;			// how the buffer is bound for the draw
	vk::Move<vk::VkPipeline>			m_pipeline;			// created in prepare()
	vk::Move<vk::VkPipelineLayout>		m_pipelineLayout;
	vk::VkDeviceSize					m_bufferSize;

	// Select the vertex shader binary matching the render mode.
	static const vk::ProgramBinary&		getVertexShader		(const vk::ProgramCollection<vk::ProgramBinary>& collections, RenderAs renderAs)
	{
		switch (renderAs)
		{
			case RENDERAS_VERTEX_BUFFER:
				return collections.get("vertex-buffer.vert");

			case RENDERAS_INDEX_BUFFER:
				return collections.get("index-buffer.vert");

			default:
				DE_FATAL("Unknown renderAs");
				return collections.get("");
		}
	}
};
4666
4667void RenderBuffer::logPrepare (TestLog& log, size_t commandIndex) const
4668{
4669	log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline for render buffer as " << (m_renderAs == RENDERAS_VERTEX_BUFFER ? "vertex" : "index") << " buffer." << TestLog::EndMessage;
4670}
4671
4672void RenderBuffer::logSubmit (TestLog& log, size_t commandIndex) const
4673{
4674	log << TestLog::Message << commandIndex << ":" << getName() << " Render using buffer as " << (m_renderAs == RENDERAS_VERTEX_BUFFER ? "vertex" : "index") << " buffer." << TestLog::EndMessage;
4675}
4676
// Create the pipeline layout (empty; no descriptors or push constants) and
// the graphics pipeline used by submit(). The pipeline draws point
// primitives over the full render target and writes white fragments
// ("render-white.frag"); the vertex shader depends on whether the test
// buffer is consumed as vertex or index data.
void RenderBuffer::prepare (PrepareRenderPassContext& context)
{
	const vk::DeviceInterface&				vkd						= context.getContext().getDeviceInterface();
	const vk::VkDevice						device					= context.getContext().getDevice();
	const vk::VkRenderPass					renderPass				= context.getRenderPass();
	const deUint32							subpass					= 0;
	const vk::Unique<vk::VkShaderModule>	vertexShaderModule		(vk::createShaderModule(vkd, device, getVertexShader(context.getBinaryCollection(), m_renderAs), 0));
	const vk::Unique<vk::VkShaderModule>	fragmentShaderModule	(vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-white.frag"), 0));

	// Remember the buffer size so verify() knows how many points were drawn.
	m_bufferSize = context.getBufferSize();

	{
		// No descriptor set layouts and no push constant ranges.
		const vk::VkPipelineLayoutCreateInfo	createInfo	=
		{
			vk::VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
			DE_NULL,
			0,
			0,
			DE_NULL,
			0,
			DE_NULL
		};

		m_pipelineLayout = vk::createPipelineLayout(vkd, device, &createInfo);
	}

	{
		const vk::VkPipelineShaderStageCreateInfo			shaderStages[]					=
		{
			{
				vk::VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
				DE_NULL,
				0,
				vk::VK_SHADER_STAGE_VERTEX_BIT,
				*vertexShaderModule,
				"main",
				DE_NULL
			},
			{
				vk::VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
				DE_NULL,
				0,
				vk::VK_SHADER_STAGE_FRAGMENT_BIT,
				*fragmentShaderModule,
				"main",
				DE_NULL
			}
		};
		// Depth and stencil tests are fully disabled; the ops below are
		// placeholders that are never exercised.
		const vk::VkPipelineDepthStencilStateCreateInfo		depthStencilState				=
		{
			vk::VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
			DE_NULL,
			0u,
			DE_FALSE,	// depthTestEnable
			DE_FALSE,	// depthWriteEnable
			vk::VK_COMPARE_OP_ALWAYS,
			DE_FALSE,	// depthBoundsTestEnable
			DE_FALSE,	// stencilTestEnable
			{
				vk::VK_STENCIL_OP_KEEP,
				vk::VK_STENCIL_OP_KEEP,
				vk::VK_STENCIL_OP_KEEP,
				vk::VK_COMPARE_OP_ALWAYS,
				0u,
				0u,
				0u,
			},
			{
				vk::VK_STENCIL_OP_KEEP,
				vk::VK_STENCIL_OP_KEEP,
				vk::VK_STENCIL_OP_KEEP,
				vk::VK_COMPARE_OP_ALWAYS,
				0u,
				0u,
				0u,
			},
			-1.0f,
			+1.0f
		};
		// One binding: two bytes (an R8G8 coordinate pair) per vertex.
		const vk::VkVertexInputBindingDescription			vertexBindingDescriptions[]		=
		{
			{
				0,	// binding
				2,	// stride: 2 bytes per vertex
				vk::VK_VERTEX_INPUT_RATE_VERTEX
			}
		};
		const vk::VkVertexInputAttributeDescription			vertexAttributeDescriptions[]	=
		{
			{
				0,	// location
				0,	// binding
				vk::VK_FORMAT_R8G8_UNORM,
				0	// offset
			}
		};
		// Vertex input is only declared when the buffer is bound as a
		// vertex buffer; in index-buffer mode the shader has no inputs.
		const vk::VkPipelineVertexInputStateCreateInfo		vertexInputState				=
		{
			vk::VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
			DE_NULL,
			0u,

			m_renderAs == RENDERAS_VERTEX_BUFFER ? DE_LENGTH_OF_ARRAY(vertexBindingDescriptions) : 0u,
			m_renderAs == RENDERAS_VERTEX_BUFFER ? vertexBindingDescriptions : DE_NULL,

			m_renderAs == RENDERAS_VERTEX_BUFFER ? DE_LENGTH_OF_ARRAY(vertexAttributeDescriptions) : 0u,
			m_renderAs == RENDERAS_VERTEX_BUFFER ? vertexAttributeDescriptions : DE_NULL,
		};
		const vk::VkPipelineInputAssemblyStateCreateInfo	inputAssemblyState				=
		{
			vk::VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
			DE_NULL,
			0,
			vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
			vk::VK_FALSE
		};
		// Single viewport/scissor covering the whole render target.
		const vk::VkViewport								viewports[]						=
		{
			{ 0.0f, 0.0f, (float)context.getTargetWidth(), (float)context.getTargetHeight(), 0.0f, 1.0f }
		};
		const vk::VkRect2D									scissors[]						=
		{
			{ { 0, 0 }, { (deUint32)context.getTargetWidth(), (deUint32)context.getTargetHeight() } }
		};
		const vk::VkPipelineViewportStateCreateInfo			viewportState					=
		{
			vk::VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
			DE_NULL,
			0,
			DE_LENGTH_OF_ARRAY(viewports),
			viewports,
			DE_LENGTH_OF_ARRAY(scissors),
			scissors
		};
		const vk::VkPipelineRasterizationStateCreateInfo	rasterState						=
		{
			vk::VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
			DE_NULL,
			0,

			vk::VK_TRUE,	// depthClampEnable -- NOTE(review): requires the depthClamp device feature; confirm this is intended rather than VK_FALSE
			vk::VK_FALSE,	// rasterizerDiscardEnable
			vk::VK_POLYGON_MODE_FILL,
			vk::VK_CULL_MODE_NONE,
			vk::VK_FRONT_FACE_COUNTER_CLOCKWISE,
			vk::VK_FALSE,	// depthBiasEnable
			0.0f,
			0.0f,
			0.0f,
			1.0f	// lineWidth
		};
		// Single-sample rendering, all samples enabled.
		const vk::VkSampleMask								sampleMask						= ~0u;
		const vk::VkPipelineMultisampleStateCreateInfo		multisampleState				=
		{
			vk::VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
			DE_NULL,
			0,

			vk::VK_SAMPLE_COUNT_1_BIT,
			vk::VK_FALSE,
			0.0f,
			&sampleMask,
			vk::VK_FALSE,
			vk::VK_FALSE
		};
		// Blending disabled; fragment output overwrites all RGBA channels.
		const vk::VkPipelineColorBlendAttachmentState		attachments[]					=
		{
			{
				vk::VK_FALSE,
				vk::VK_BLEND_FACTOR_ONE,
				vk::VK_BLEND_FACTOR_ZERO,
				vk::VK_BLEND_OP_ADD,
				vk::VK_BLEND_FACTOR_ONE,
				vk::VK_BLEND_FACTOR_ZERO,
				vk::VK_BLEND_OP_ADD,
				(vk::VK_COLOR_COMPONENT_R_BIT|
				 vk::VK_COLOR_COMPONENT_G_BIT|
				 vk::VK_COLOR_COMPONENT_B_BIT|
				 vk::VK_COLOR_COMPONENT_A_BIT)
			}
		};
		const vk::VkPipelineColorBlendStateCreateInfo		colorBlendState					=
		{
			vk::VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
			DE_NULL,
			0,

			vk::VK_FALSE,	// logicOpEnable
			vk::VK_LOGIC_OP_COPY,
			DE_LENGTH_OF_ARRAY(attachments),
			attachments,
			{ 0.0f, 0.0f, 0.0f, 0.0f }
		};
		// No tessellation state and no dynamic state; everything is baked
		// into the pipeline.
		const vk::VkGraphicsPipelineCreateInfo				createInfo						=
		{
			vk::VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
			DE_NULL,
			0u,

			DE_LENGTH_OF_ARRAY(shaderStages),
			shaderStages,

			&vertexInputState,
			&inputAssemblyState,
			DE_NULL,
			&viewportState,
			&rasterState,
			&multisampleState,
			&depthStencilState,
			&colorBlendState,
			DE_NULL,
			*m_pipelineLayout,
			renderPass,
			subpass,
			0,
			0
		};

		m_pipeline = vk::createGraphicsPipeline(vkd, device, 0, &createInfo);
	}
}
4898
4899void RenderBuffer::submit (SubmitContext& context)
4900{
4901	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
4902	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
4903	const vk::VkDeviceSize		offset			= 0;
4904	const vk::VkBuffer			buffer			= context.getBuffer();
4905
4906	vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
4907
4908	if (m_renderAs == RENDERAS_VERTEX_BUFFER)
4909	{
4910		vkd.cmdBindVertexBuffers(commandBuffer, 0, 1, &buffer, &offset);
4911		vkd.cmdDraw(commandBuffer, (deUint32)(context.getBufferSize() / 2), 1, 0, 0);
4912	}
4913	else if (m_renderAs == RENDERAS_INDEX_BUFFER)
4914	{
4915		vkd.cmdBindIndexBuffer(commandBuffer, context.getBuffer(), 0, vk::VK_INDEX_TYPE_UINT16);
4916		vkd.cmdDrawIndexed(commandBuffer, (deUint32)(context.getBufferSize() / 2), 1, 0, 0, 0);
4917	}
4918	else
4919		DE_FATAL("Unknown renderAs");
4920}
4921
4922void RenderBuffer::verify (VerifyRenderPassContext& context, size_t)
4923{
4924	for (size_t pos = 0; pos < (size_t)m_bufferSize / 2; pos++)
4925	{
4926		const deUint8 x  = context.getReference().get(pos * 2);
4927		const deUint8 y  = context.getReference().get((pos * 2) + 1);
4928
4929		context.getReferenceTarget().getAccess().setPixel(Vec4(1.0f, 1.0f, 1.0f, 1.0f), x, y);
4930	}
4931}
4932
// All operations the randomized test state machine can emit. The stage a
// value is valid in (host, command buffer, or render pass) is decided by
// getAvailableOps().
enum Op
{
	// Host-side memory mapping operations
	OP_MAP,
	OP_UNMAP,

	OP_MAP_FLUSH,
	OP_MAP_INVALIDATE,

	OP_MAP_READ,
	OP_MAP_WRITE,
	OP_MAP_MODIFY,

	// Buffer object lifetime
	OP_BUFFER_CREATE,
	OP_BUFFER_DESTROY,
	OP_BUFFER_BINDMEMORY,

	// Synchronization with the device
	OP_QUEUE_WAIT_FOR_IDLE,
	OP_DEVICE_WAIT_FOR_IDLE,

	OP_COMMAND_BUFFER_BEGIN,
	OP_COMMAND_BUFFER_END,

	// Buffer transfer operations
	OP_BUFFER_FILL,
	OP_BUFFER_UPDATE,

	OP_BUFFER_COPY_TO_BUFFER,
	OP_BUFFER_COPY_FROM_BUFFER,

	OP_BUFFER_COPY_TO_IMAGE,
	OP_BUFFER_COPY_FROM_IMAGE,

	// Image object lifetime
	OP_IMAGE_CREATE,
	OP_IMAGE_DESTROY,
	OP_IMAGE_BINDMEMORY,

	OP_IMAGE_TRANSITION_LAYOUT,

	// Image transfer operations
	OP_IMAGE_COPY_TO_BUFFER,
	OP_IMAGE_COPY_FROM_BUFFER,

	OP_IMAGE_COPY_TO_IMAGE,
	OP_IMAGE_COPY_FROM_IMAGE,

	OP_IMAGE_BLIT_TO_IMAGE,
	OP_IMAGE_BLIT_FROM_IMAGE,

	OP_IMAGE_RESOLVE,

	// Pipeline barriers (the subject of these tests)
	OP_PIPELINE_BARRIER_GLOBAL,
	OP_PIPELINE_BARRIER_BUFFER,
	OP_PIPELINE_BARRIER_IMAGE,

	// Renderpass operations
	OP_RENDERPASS_BEGIN,
	OP_RENDERPASS_END,

	// Commands inside render pass
	OP_RENDER_VERTEX_BUFFER,
	OP_RENDER_INDEX_BUFFER
};
4994
// Where the state machine currently is: executing host-side commands,
// recording into a command buffer, or recording inside a render pass.
enum Stage
{
	STAGE_HOST,
	STAGE_COMMAND_BUFFER,

	STAGE_RENDER_PASS
};
5002
5003vk::VkAccessFlags getWriteAccessFlags (void)
5004{
5005	return vk::VK_ACCESS_SHADER_WRITE_BIT
5006		| vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT
5007		| vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT
5008		| vk::VK_ACCESS_TRANSFER_WRITE_BIT
5009		| vk::VK_ACCESS_HOST_WRITE_BIT
5010		| vk::VK_ACCESS_MEMORY_WRITE_BIT;
5011}
5012
5013bool isWriteAccess (vk::VkAccessFlagBits access)
5014{
5015	return (getWriteAccessFlags() & access) != 0;
5016}
5017
// Models which memory operations are complete, available and visible in
// each pipeline stage, mirroring the Vulkan execution/memory dependency
// rules. Used to decide when a pipeline barrier is required and to build
// the barrier parameters.
class CacheState
{
public:
									CacheState				(vk::VkPipelineStageFlags allowedStages, vk::VkAccessFlags allowedAccesses);

	// Is it valid to perform 'access' in 'stage' without further barriers?
	bool							isValid					(vk::VkPipelineStageFlagBits	stage,
															 vk::VkAccessFlagBits			access) const;

	// Record that 'access' was performed in 'stage'.
	void							perform					(vk::VkPipelineStageFlagBits	stage,
															 vk::VkAccessFlagBits			access);

	// Apply the implicit host-side synchronization of a queue submit.
	void							submitCommandBuffer		(void);
	// Apply the synchronization of vkQueueWaitIdle/vkDeviceWaitIdle.
	void							waitForIdle				(void);

	// Compute barrier parameters that would make the state fully clean.
	void							getFullBarrier			(vk::VkPipelineStageFlags&	srcStages,
															 vk::VkAccessFlags&			srcAccesses,
															 vk::VkPipelineStageFlags&	dstStages,
															 vk::VkAccessFlags&			dstAccesses) const;

	// Apply a pipeline barrier with the given scopes.
	void							barrier					(vk::VkPipelineStageFlags	srcStages,
															 vk::VkAccessFlags			srcAccesses,
															 vk::VkPipelineStageFlags	dstStages,
															 vk::VkAccessFlags			dstAccesses);

	// Apply a barrier that also performs an image layout transition.
	void							imageLayoutBarrier		(vk::VkPipelineStageFlags	srcStages,
															 vk::VkAccessFlags			srcAccesses,
															 vk::VkPipelineStageFlags	dstStages,
															 vk::VkAccessFlags			dstAccesses);

	// Debug-build sanity checks for a layout-transition barrier.
	void							checkImageLayoutBarrier	(vk::VkPipelineStageFlags	srcStages,
															 vk::VkAccessFlags			srcAccesses,
															 vk::VkPipelineStageFlags	dstStages,
															 vk::VkAccessFlags			dstAccesses);

	// Everything is clean and there is no need for barriers
	bool							isClean					(void) const;

	vk::VkPipelineStageFlags		getAllowedStages		(void) const { return m_allowedStages; }
	// NOTE(review): name has a typo ("Acceses") but is part of the public
	// interface; renaming would need a coordinated caller update.
	vk::VkAccessFlags				getAllowedAcceses		(void) const { return m_allowedAccesses; }
private:
	// Limit which stages and accesses are used by the CacheState tracker
	const vk::VkPipelineStageFlags	m_allowedStages;
	const vk::VkAccessFlags			m_allowedAccesses;

	// [dstStage][srcStage] = srcAccesses
	// In stage dstStage write srcAccesses from srcStage are not yet available
	vk::VkAccessFlags				m_unavailableWriteOperations[PIPELINESTAGE_LAST][PIPELINESTAGE_LAST];
	// Latest image layout transition is not yet available in stage
	bool							m_unavailableLayoutTransition[PIPELINESTAGE_LAST];
	// [dstStage] = dstAccesses
	// In stage dstStage ops with dstAccesses are not yet visible
	vk::VkAccessFlags				m_invisibleOperations[PIPELINESTAGE_LAST];

	// [dstStage] = srcStage
	// Memory operation in srcStage have not completed before dstStage
	vk::VkPipelineStageFlags		m_incompleteOperations[PIPELINESTAGE_LAST];
};
5075
5076CacheState::CacheState (vk::VkPipelineStageFlags allowedStages, vk::VkAccessFlags allowedAccesses)
5077	: m_allowedStages	(allowedStages)
5078	, m_allowedAccesses	(allowedAccesses)
5079{
5080	for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
5081	{
5082		const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
5083
5084		if ((dstStage_ & m_allowedStages) == 0)
5085			continue;
5086
5087		// All operations are initially visible
5088		m_invisibleOperations[dstStage] = 0;
5089
5090		// There are no incomplete read operations initially
5091		m_incompleteOperations[dstStage] = 0;
5092
5093		// There are no incomplete layout transitions
5094		m_unavailableLayoutTransition[dstStage] = false;
5095
5096		for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
5097		{
5098			const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
5099
5100			if ((srcStage_ & m_allowedStages) == 0)
5101				continue;
5102
5103			// There are no write operations that are not yet available
5104			// initially.
5105			m_unavailableWriteOperations[dstStage][srcStage] = 0;
5106		}
5107	}
5108}
5109
5110bool CacheState::isValid (vk::VkPipelineStageFlagBits	stage,
5111						  vk::VkAccessFlagBits			access) const
5112{
5113	DE_ASSERT((access & (~m_allowedAccesses)) == 0);
5114	DE_ASSERT((stage & (~m_allowedStages)) == 0);
5115
5116	const PipelineStage	dstStage	= pipelineStageFlagToPipelineStage(stage);
5117
5118	// Previous operations are not visible to access on stage
5119	if (m_unavailableLayoutTransition[dstStage] || (m_invisibleOperations[dstStage] & access) != 0)
5120		return false;
5121
5122	if (isWriteAccess(access))
5123	{
5124		// Memory operations from other stages have not completed before
5125		// dstStage
5126		if (m_incompleteOperations[dstStage] != 0)
5127			return false;
5128	}
5129
5130	return true;
5131}
5132
5133void CacheState::perform (vk::VkPipelineStageFlagBits	stage,
5134						  vk::VkAccessFlagBits			access)
5135{
5136	DE_ASSERT((access & (~m_allowedAccesses)) == 0);
5137	DE_ASSERT((stage & (~m_allowedStages)) == 0);
5138
5139	const PipelineStage srcStage = pipelineStageFlagToPipelineStage(stage);
5140
5141	for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
5142	{
5143		const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
5144
5145		if ((dstStage_ & m_allowedStages) == 0)
5146			continue;
5147
5148		// Mark stage as incomplete for all stages
5149		m_incompleteOperations[dstStage] |= stage;
5150
5151		if (isWriteAccess(access))
5152		{
5153			// Mark all accesses from all stages invisible
5154			m_invisibleOperations[dstStage] |= m_allowedAccesses;
5155
5156			// Mark write access from srcStage unavailable to all stages
5157			m_unavailableWriteOperations[dstStage][srcStage] |= access;
5158		}
5159	}
5160}
5161
5162void CacheState::submitCommandBuffer (void)
5163{
5164	// Flush all host writes and reads
5165	barrier(m_allowedStages & vk::VK_PIPELINE_STAGE_HOST_BIT,
5166			m_allowedAccesses & (vk::VK_ACCESS_HOST_READ_BIT | vk::VK_ACCESS_HOST_WRITE_BIT),
5167			m_allowedStages,
5168			m_allowedAccesses);
5169}
5170
5171void CacheState::waitForIdle (void)
5172{
5173	// Make all writes available
5174	barrier(m_allowedStages,
5175			m_allowedAccesses & getWriteAccessFlags(),
5176			m_allowedStages,
5177			0);
5178
5179	// Make all writes visible on device side
5180	barrier(m_allowedStages,
5181			0,
5182			m_allowedStages & (~vk::VK_PIPELINE_STAGE_HOST_BIT),
5183			m_allowedAccesses);
5184}
5185
// Compute the smallest barrier scopes that would bring the tracked state
// back to fully clean (see barrier() for how the scopes are consumed).
void CacheState::getFullBarrier (vk::VkPipelineStageFlags&	srcStages,
								 vk::VkAccessFlags&			srcAccesses,
								 vk::VkPipelineStageFlags&	dstStages,
								 vk::VkAccessFlags&			dstAccesses) const
{
	srcStages	= 0;
	srcAccesses	= 0;
	dstStages	= 0;
	dstAccesses	= 0;

	for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
	{
		const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);

		if ((dstStage_ & m_allowedStages) == 0)
			continue;

		// Make sure all previous operation are complete in all stages
		if (m_incompleteOperations[dstStage])
		{
			dstStages |= dstStage_;
			srcStages |= m_incompleteOperations[dstStage];
		}

		// Make sure all read operations are visible in dstStage
		if (m_invisibleOperations[dstStage])
		{
			dstStages |= dstStage_;
			dstAccesses |= m_invisibleOperations[dstStage];
		}

		// Make sure all write operations from all stages are available
		for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
		{
			const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);

			if ((srcStage_ & m_allowedStages) == 0)
				continue;

			if (m_unavailableWriteOperations[dstStage][srcStage])
			{
				dstStages |= dstStage_;
				// NOTE(review): this adds dstStage_ to the source scope even
				// though the unavailable write originates from srcStage_;
				// barrier() only clears m_unavailableWriteOperations[dst][src]
				// when srcStages contains srcStage_. Confirm whether
				// 'srcStages |= srcStage_' was intended here.
				srcStages |= dstStage_;
				srcAccesses |= m_unavailableWriteOperations[dstStage][srcStage];
			}

			if (m_unavailableLayoutTransition[dstStage] && !m_unavailableLayoutTransition[srcStage])
			{
				// Add dependency between srcStage and dstStage if layout transition has not completed in dstStage,
				// but has completed in srcStage.
				dstStages |= dstStage_;
				srcStages |= dstStage_;
			}
		}
	}

	// The computed scopes must stay within what this tracker allows.
	DE_ASSERT((srcStages & (~m_allowedStages)) == 0);
	DE_ASSERT((srcAccesses & (~m_allowedAccesses)) == 0);
	DE_ASSERT((dstStages & (~m_allowedStages)) == 0);
	DE_ASSERT((dstAccesses & (~m_allowedAccesses)) == 0);
}
5247
// Debug-build sanity checks for a barrier that performs an image layout
// transition: all prior work must be ordered before the transition and at
// least one copy of each pending write must already be available.
void CacheState::checkImageLayoutBarrier (vk::VkPipelineStageFlags	srcStages,
										 vk::VkAccessFlags			srcAccesses,
										 vk::VkPipelineStageFlags	dstStages,
										 vk::VkAccessFlags			dstAccesses)
{
	DE_ASSERT((srcStages & (~m_allowedStages)) == 0);
	DE_ASSERT((srcAccesses & (~m_allowedAccesses)) == 0);
	DE_ASSERT((dstStages & (~m_allowedStages)) == 0);
	DE_ASSERT((dstAccesses & (~m_allowedAccesses)) == 0);

	// Parameters are only used by the DE_DEBUG block below.
	DE_UNREF(srcStages);
	DE_UNREF(srcAccesses);

	DE_UNREF(dstStages);
	DE_UNREF(dstAccesses);

#if defined(DE_DEBUG)
	// Check that all stages have completed before srcStages or are in srcStages.
	{
		vk::VkPipelineStageFlags completedStages = srcStages;

		for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= srcStages; srcStage_ <<= 1)
		{
			const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);

			if ((srcStage_ & srcStages) == 0)
				continue;

			// Stages NOT listed as incomplete in srcStage have completed.
			completedStages |= (~m_incompleteOperations[srcStage]);
		}

		DE_ASSERT((completedStages & m_allowedStages) == m_allowedStages);
	}

	// Check that any write is available at least in one stage. Since all stages are complete even single flush is enough.
	if ((getWriteAccessFlags() & m_allowedAccesses) != 0 && (srcAccesses & getWriteAccessFlags()) == 0)
	{
		bool anyWriteAvailable = false;

		for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
		{
			const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);

			if ((dstStage_ & m_allowedStages) == 0)
				continue;

			for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
			{
				const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);

				if ((srcStage_ & m_allowedStages) == 0)
					continue;

				// If some (dst, src) pair is missing at least one write
				// access bit, that write has been made available somewhere.
				if (m_unavailableWriteOperations[dstStage][srcStage] != (getWriteAccessFlags() & m_allowedAccesses))
				{
					anyWriteAvailable = true;
					break;
				}
			}
		}

		DE_ASSERT(anyWriteAvailable);
	}
#endif
}
5313
5314void CacheState::imageLayoutBarrier (vk::VkPipelineStageFlags	srcStages,
5315									 vk::VkAccessFlags			srcAccesses,
5316									 vk::VkPipelineStageFlags	dstStages,
5317									 vk::VkAccessFlags			dstAccesses)
5318{
5319	checkImageLayoutBarrier(srcStages, srcAccesses, dstStages, dstAccesses);
5320
5321	for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
5322	{
5323		const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
5324
5325		if ((dstStage_ & m_allowedStages) == 0)
5326			continue;
5327
5328		// All stages are incomplete after the barrier except each dstStage in it self.
5329		m_incompleteOperations[dstStage] = m_allowedStages & (~dstStage_);
5330
5331		// All memory operations are invisible unless they are listed in dstAccess
5332		m_invisibleOperations[dstStage] = m_allowedAccesses & (~dstAccesses);
5333
5334		// Layout transition is unavailable in stage unless it was listed in dstStages
5335		m_unavailableLayoutTransition[dstStage]= (dstStage_ & dstStages) == 0;
5336
5337		for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
5338		{
5339			const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
5340
5341			if ((srcStage_ & m_allowedStages) == 0)
5342				continue;
5343
5344			// All write operations are available after layout transition
5345			m_unavailableWriteOperations[dstStage][srcStage] = 0;
5346		}
5347	}
5348}
5349
// Apply a pipeline barrier: first propagate state transitively (anything
// ordered before a stage in srcStages is now also ordered before every
// stage in dstStages), then apply the barrier's own availability and
// visibility operations.
void CacheState::barrier (vk::VkPipelineStageFlags	srcStages,
						  vk::VkAccessFlags			srcAccesses,
						  vk::VkPipelineStageFlags	dstStages,
						  vk::VkAccessFlags			dstAccesses)
{
	DE_ASSERT((srcStages & (~m_allowedStages)) == 0);
	DE_ASSERT((srcAccesses & (~m_allowedAccesses)) == 0);
	DE_ASSERT((dstStages & (~m_allowedStages)) == 0);
	DE_ASSERT((dstAccesses & (~m_allowedAccesses)) == 0);

	// Transitivity
	{
		// Snapshot the pre-barrier state so the propagation below reads
		// consistent values even while the arrays are being updated.
		vk::VkPipelineStageFlags		oldIncompleteOperations[PIPELINESTAGE_LAST];
		vk::VkAccessFlags				oldUnavailableWriteOperations[PIPELINESTAGE_LAST][PIPELINESTAGE_LAST];
		bool							oldUnavailableLayoutTransition[PIPELINESTAGE_LAST];

		deMemcpy(oldIncompleteOperations, m_incompleteOperations, sizeof(oldIncompleteOperations));
		deMemcpy(oldUnavailableWriteOperations, m_unavailableWriteOperations, sizeof(oldUnavailableWriteOperations));
		deMemcpy(oldUnavailableLayoutTransition, m_unavailableLayoutTransition, sizeof(oldUnavailableLayoutTransition));

		for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= srcStages; srcStage_ <<= 1)
		{
			const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);

			if ((srcStage_ & srcStages) == 0)
				continue;

			for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= dstStages; dstStage_ <<= 1)
			{
				const PipelineStage	dstStage			= pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);

				if ((dstStage_ & dstStages) == 0)
					continue;

				// Stages that have completed before srcStage have also completed before dstStage
				m_incompleteOperations[dstStage] &= oldIncompleteOperations[srcStage];

				// Image layout transition in srcStage are now available in dstStage
				m_unavailableLayoutTransition[dstStage] &= oldUnavailableLayoutTransition[srcStage];

				for (vk::VkPipelineStageFlags sharedStage_ = 1; sharedStage_ <= m_allowedStages; sharedStage_ <<= 1)
				{
					const PipelineStage	sharedStage			= pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)sharedStage_);

					if ((sharedStage_ & m_allowedStages) == 0)
						continue;

					// Writes that are available in srcStage are also available in dstStage
					m_unavailableWriteOperations[dstStage][sharedStage] &= oldUnavailableWriteOperations[srcStage][sharedStage];
				}
			}
		}
	}

	// Barrier
	for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= dstStages; dstStage_ <<= 1)
	{
		const PipelineStage	dstStage			= pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
		bool				allWritesAvailable	= true;

		if ((dstStage_ & dstStages) == 0)
			continue;

		// Operations in srcStages have completed before any stage in dstStages
		m_incompleteOperations[dstStage] &= ~srcStages;

		for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
		{
			const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);

			if ((srcStage_ & m_allowedStages) == 0)
				continue;

			// Make srcAccesses from srcStage available in dstStage
			if ((srcStage_ & srcStages) != 0)
				m_unavailableWriteOperations[dstStage][srcStage] &= ~srcAccesses;

			if (m_unavailableWriteOperations[dstStage][srcStage] != 0)
				allWritesAvailable = false;
		}

		// If all writes are available in dstStage make dstAccesses also visible
		if (allWritesAvailable)
			m_invisibleOperations[dstStage] &= ~dstAccesses;
	}
}
5436
5437bool CacheState::isClean (void) const
5438{
5439	for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
5440	{
5441		const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
5442
5443		if ((dstStage_ & m_allowedStages) == 0)
5444			continue;
5445
5446		// Some operations are not visible to some stages
5447		if (m_invisibleOperations[dstStage] != 0)
5448			return false;
5449
5450		// There are operation that have not completed yet
5451		if (m_incompleteOperations[dstStage] != 0)
5452			return false;
5453
5454		// Layout transition has not completed yet
5455		if (m_unavailableLayoutTransition[dstStage])
5456			return false;
5457
5458		for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
5459		{
5460			const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
5461
5462			if ((srcStage_ & m_allowedStages) == 0)
5463				continue;
5464
5465			// Some write operations are not available yet
5466			if (m_unavailableWriteOperations[dstStage][srcStage] != 0)
5467				return false;
5468		}
5469	}
5470
5471	return true;
5472}
5473
// Full state of the randomized test: which stage the state machine is in,
// the cache/synchronization model, the RNG, and flags tracking resource
// lifetime and host-mapping status. getAvailableOps() reads this to decide
// which operations are legal next.
struct State
{
	State (Usage usage, deUint32 seed)
		: stage					(STAGE_HOST)
		, cache					(usageToStageFlags(usage), usageToAccessFlags(usage))
		, rng					(seed)
		, mapped				(false)
		, hostInvalidated		(true)
		, hostFlushed			(true)
		, memoryDefined			(false)
		, hasBuffer				(false)
		, hasBoundBufferMemory	(false)
		, hasImage				(false)
		, hasBoundImageMemory	(false)
		, imageLayout			(vk::VK_IMAGE_LAYOUT_UNDEFINED)
		, imageDefined			(false)
		, queueIdle				(true)
		, deviceIdle			(true)
		, commandBufferIsEmpty	(true)
	{
	}

	Stage				stage;					// Current recording stage (host / command buffer / render pass)
	CacheState			cache;					// Tracks availability and visibility of memory operations
	de::Random			rng;					// Deterministic RNG driving operation selection

	bool				mapped;					// Memory is currently host-mapped
	bool				hostInvalidated;		// Host caches have been invalidated since the last device write
	bool				hostFlushed;			// Host writes have been flushed
	bool				memoryDefined;			// Buffer memory contents have defined values

	bool				hasBuffer;				// Buffer object exists
	bool				hasBoundBufferMemory;	// Buffer has memory bound

	bool				hasImage;				// Image object exists
	bool				hasBoundImageMemory;	// Image has memory bound
	vk::VkImageLayout	imageLayout;			// Current image layout
	bool				imageDefined;			// Image contents have defined values

	bool				queueIdle;				// No work pending on the queue
	bool				deviceIdle;				// No work pending on the device

	bool				commandBufferIsEmpty;	// Nothing recorded since last begin
};
5518
5519void getAvailableOps (const State& state, bool supportsBuffers, bool supportsImages, Usage usage, vector<Op>& ops)
5520{
5521	if (state.stage == STAGE_HOST)
5522	{
5523		if (usage & (USAGE_HOST_READ | USAGE_HOST_WRITE))
5524		{
5525			// Host memory operations
5526			if (state.mapped)
5527			{
5528				ops.push_back(OP_UNMAP);
5529
5530				// Avoid flush and finish if they are not needed
5531				if (!state.hostFlushed)
5532					ops.push_back(OP_MAP_FLUSH);
5533
5534				if (!state.hostInvalidated
5535					&& state.queueIdle
5536					&& ((usage & USAGE_HOST_READ) == 0
5537						|| state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_READ_BIT))
5538					&& ((usage & USAGE_HOST_WRITE) == 0
5539						|| state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_WRITE_BIT)))
5540				{
5541					ops.push_back(OP_MAP_INVALIDATE);
5542				}
5543
5544				if (usage & USAGE_HOST_READ
5545					&& usage & USAGE_HOST_WRITE
5546					&& state.memoryDefined
5547					&& state.hostInvalidated
5548					&& state.queueIdle
5549					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_WRITE_BIT)
5550					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_READ_BIT))
5551				{
5552					ops.push_back(OP_MAP_MODIFY);
5553				}
5554
5555				if (usage & USAGE_HOST_READ
5556					&& state.memoryDefined
5557					&& state.hostInvalidated
5558					&& state.queueIdle
5559					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_READ_BIT))
5560				{
5561					ops.push_back(OP_MAP_READ);
5562				}
5563
5564				if (usage & USAGE_HOST_WRITE
5565					&& state.hostInvalidated
5566					&& state.queueIdle
5567					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_WRITE_BIT))
5568				{
5569					ops.push_back(OP_MAP_WRITE);
5570				}
5571			}
5572			else
5573				ops.push_back(OP_MAP);
5574		}
5575
5576		if (state.hasBoundBufferMemory && state.queueIdle)
5577		{
5578			// \note Destroy only buffers after they have been bound
5579			ops.push_back(OP_BUFFER_DESTROY);
5580		}
5581		else
5582		{
5583			if (state.hasBuffer)
5584			{
5585				if (!state.hasBoundBufferMemory)
5586					ops.push_back(OP_BUFFER_BINDMEMORY);
5587			}
5588			else if (!state.hasImage && supportsBuffers)	// Avoid creating buffer if there is already image
5589				ops.push_back(OP_BUFFER_CREATE);
5590		}
5591
5592		if (state.hasBoundImageMemory && state.queueIdle)
5593		{
5594			// \note Destroy only image after they have been bound
5595			ops.push_back(OP_IMAGE_DESTROY);
5596		}
5597		else
5598		{
5599			if (state.hasImage)
5600			{
5601				if (!state.hasBoundImageMemory)
5602					ops.push_back(OP_IMAGE_BINDMEMORY);
5603			}
5604			else if (!state.hasBuffer && supportsImages)	// Avoid creating image if there is already buffer
5605				ops.push_back(OP_IMAGE_CREATE);
5606		}
5607
5608		// Host writes must be flushed before GPU commands and there must be
5609		// buffer or image for GPU commands
5610		if (state.hostFlushed
5611			&& (state.memoryDefined || supportsDeviceBufferWrites(usage) || state.imageDefined || supportsDeviceImageWrites(usage))
5612			&& (state.hasBoundBufferMemory || state.hasBoundImageMemory) // Avoid command buffers if there is no object to use
5613			&& (usageToStageFlags(usage) & (~vk::VK_PIPELINE_STAGE_HOST_BIT)) != 0) // Don't start command buffer if there are no ways to use memory from gpu
5614		{
5615			ops.push_back(OP_COMMAND_BUFFER_BEGIN);
5616		}
5617
5618		if (!state.deviceIdle)
5619			ops.push_back(OP_DEVICE_WAIT_FOR_IDLE);
5620
5621		if (!state.queueIdle)
5622			ops.push_back(OP_QUEUE_WAIT_FOR_IDLE);
5623	}
5624	else if (state.stage == STAGE_COMMAND_BUFFER)
5625	{
5626		if (!state.cache.isClean())
5627		{
5628			ops.push_back(OP_PIPELINE_BARRIER_GLOBAL);
5629
5630			if (state.hasImage)
5631				ops.push_back(OP_PIPELINE_BARRIER_IMAGE);
5632
5633			if (state.hasBuffer)
5634				ops.push_back(OP_PIPELINE_BARRIER_BUFFER);
5635		}
5636
5637		if (state.hasBoundBufferMemory)
5638		{
5639			if (usage & USAGE_TRANSFER_DST
5640				&& state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT))
5641			{
5642				ops.push_back(OP_BUFFER_FILL);
5643				ops.push_back(OP_BUFFER_UPDATE);
5644				ops.push_back(OP_BUFFER_COPY_FROM_BUFFER);
5645				ops.push_back(OP_BUFFER_COPY_FROM_IMAGE);
5646			}
5647
5648			if (usage & USAGE_TRANSFER_SRC
5649				&& state.memoryDefined
5650				&& state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT))
5651			{
5652				ops.push_back(OP_BUFFER_COPY_TO_BUFFER);
5653				ops.push_back(OP_BUFFER_COPY_TO_IMAGE);
5654			}
5655		}
5656
5657		if (state.hasBoundImageMemory)
5658		{
5659			ops.push_back(OP_IMAGE_TRANSITION_LAYOUT);
5660
5661			{
5662				if (usage & USAGE_TRANSFER_DST
5663					&& (state.imageLayout == vk::VK_IMAGE_LAYOUT_GENERAL
5664						|| state.imageLayout == vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
5665					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT))
5666				{
5667					ops.push_back(OP_IMAGE_COPY_FROM_BUFFER);
5668					ops.push_back(OP_IMAGE_COPY_FROM_IMAGE);
5669					ops.push_back(OP_IMAGE_BLIT_FROM_IMAGE);
5670				}
5671
5672				if (usage & USAGE_TRANSFER_SRC
5673					&& (state.imageLayout == vk::VK_IMAGE_LAYOUT_GENERAL
5674						|| state.imageLayout == vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL)
5675					&& state.imageDefined
5676					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT))
5677				{
5678					ops.push_back(OP_IMAGE_COPY_TO_BUFFER);
5679					ops.push_back(OP_IMAGE_COPY_TO_IMAGE);
5680					ops.push_back(OP_IMAGE_BLIT_TO_IMAGE);
5681				}
5682			}
5683		}
5684
5685		// \todo [2016-03-09 mika] Add other usages?
5686		if (((usage & USAGE_VERTEX_BUFFER) && state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT))
5687			|| ((usage & USAGE_INDEX_BUFFER) && state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_INDEX_READ_BIT)))
5688			ops.push_back(OP_RENDERPASS_BEGIN);
5689
5690		// \note This depends on previous operations and has to be always the
5691		// last command buffer operation check
5692		if (ops.empty() || !state.commandBufferIsEmpty)
5693			ops.push_back(OP_COMMAND_BUFFER_END);
5694	}
5695	else if (state.stage == STAGE_RENDER_PASS)
5696	{
5697		if (usage & USAGE_VERTEX_BUFFER
5698			&& state.memoryDefined
5699			&& state.hasBoundBufferMemory
5700			&& state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT))
5701		{
5702			ops.push_back(OP_RENDER_VERTEX_BUFFER);
5703		}
5704
5705		if (usage & USAGE_INDEX_BUFFER
5706			&& state.memoryDefined
5707			&& state.hasBoundBufferMemory
5708			&& state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_INDEX_READ_BIT))
5709		{
5710			ops.push_back(OP_RENDER_INDEX_BUFFER);
5711		}
5712
5713		ops.push_back(OP_RENDERPASS_END);
5714	}
5715	else
5716		DE_FATAL("Unknown stage");
5717}
5718
5719bool layoutSupportedByUsage (Usage usage, vk::VkImageLayout layout)
5720{
5721	switch (layout)
5722	{
5723		case vk::VK_IMAGE_LAYOUT_GENERAL:
5724			return true;
5725
5726		case vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
5727			return (usage & USAGE_COLOR_ATTACHMENT) != 0;
5728
5729		case vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
5730			return (usage & USAGE_DEPTH_STENCIL_ATTACHMENT) != 0;
5731
5732		case vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
5733			return (usage & USAGE_DEPTH_STENCIL_ATTACHMENT) != 0;
5734
5735		case vk::VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
5736			// \todo [2016-03-09 mika] Should include input attachment
5737			return (usage & USAGE_TEXTURE_SAMPLED) != 0;
5738
5739		case vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
5740			return (usage & USAGE_TRANSFER_SRC) != 0;
5741
5742		case vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
5743			return (usage & USAGE_TRANSFER_DST) != 0;
5744
5745		case vk::VK_IMAGE_LAYOUT_PREINITIALIZED:
5746			return true;
5747
5748		default:
5749			DE_FATAL("Unknown layout");
5750			return false;
5751	}
5752}
5753
5754vk::VkImageLayout getRandomNextLayout (de::Random&			rng,
5755									   Usage				usage,
5756									   vk::VkImageLayout	previousLayout)
5757{
5758	const vk::VkImageLayout layouts[] =
5759	{
5760		vk::VK_IMAGE_LAYOUT_GENERAL,
5761		vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
5762		vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
5763		vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
5764		vk::VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
5765		vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
5766		vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
5767	};
5768	size_t possibleLayoutCount = 0;
5769
5770	for (size_t layoutNdx = 0; layoutNdx < DE_LENGTH_OF_ARRAY(layouts); layoutNdx++)
5771	{
5772		const vk::VkImageLayout layout = layouts[layoutNdx];
5773
5774		if (layoutSupportedByUsage(usage, layout) && layout != previousLayout)
5775			possibleLayoutCount++;
5776	}
5777
5778	size_t nextLayoutNdx = ((size_t)rng.getUint64()) % possibleLayoutCount;
5779
5780	for (size_t layoutNdx = 0; layoutNdx < DE_LENGTH_OF_ARRAY(layouts); layoutNdx++)
5781	{
5782		const vk::VkImageLayout layout = layouts[layoutNdx];
5783
5784		if (layoutSupportedByUsage(usage, layout) && layout != previousLayout)
5785		{
5786			if (nextLayoutNdx == 0)
5787				return layout;
5788			else
5789				nextLayoutNdx--;
5790		}
5791	}
5792
5793	DE_FATAL("Unreachable");
5794	return vk::VK_IMAGE_LAYOUT_UNDEFINED;
5795}
5796
// Apply the side effects of executing 'op' to the simulated memory 'state'.
// This is the single source of truth for how each operation changes the
// state machine and must stay in sync with the corresponding Command /
// CmdCommand implementations. Several cases draw values from state.rng;
// those draws must exactly mirror the ones performed when the concrete
// command object is created, since callers assert that the generators
// stay in lock step.
void applyOp (State& state, const Memory& memory, Op op, Usage usage)
{
	switch (op)
	{
		case OP_MAP:
			DE_ASSERT(state.stage == STAGE_HOST);
			DE_ASSERT(!state.mapped);
			state.mapped = true;
			break;

		case OP_UNMAP:
			DE_ASSERT(state.stage == STAGE_HOST);
			DE_ASSERT(state.mapped);
			state.mapped = false;
			break;

		case OP_MAP_FLUSH:
			DE_ASSERT(state.stage == STAGE_HOST);
			DE_ASSERT(!state.hostFlushed);
			state.hostFlushed = true;
			break;

		case OP_MAP_INVALIDATE:
			DE_ASSERT(state.stage == STAGE_HOST);
			DE_ASSERT(!state.hostInvalidated);
			state.hostInvalidated = true;
			break;

		case OP_MAP_READ:
			DE_ASSERT(state.stage == STAGE_HOST);
			DE_ASSERT(state.hostInvalidated);
			// Mirrors the rng draw used to construct HostMemoryAccess.
			state.rng.getUint32();
			break;

		case OP_MAP_WRITE:
			DE_ASSERT(state.stage == STAGE_HOST);
			// Non-coherent memory needs an explicit flush after host writes.
			if ((memory.getMemoryType().propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
				state.hostFlushed = false;

			// A host write defines the raw memory contents but invalidates
			// any image interpretation of the memory.
			state.memoryDefined = true;
			state.imageDefined = false;
			state.imageLayout = vk::VK_IMAGE_LAYOUT_UNDEFINED;
			state.rng.getUint32();
			break;

		case OP_MAP_MODIFY:
			DE_ASSERT(state.stage == STAGE_HOST);
			DE_ASSERT(state.hostInvalidated);

			if ((memory.getMemoryType().propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
				state.hostFlushed = false;

			state.rng.getUint32();
			break;

		case OP_BUFFER_CREATE:
			DE_ASSERT(state.stage == STAGE_HOST);
			DE_ASSERT(!state.hasBuffer);

			state.hasBuffer = true;
			break;

		case OP_BUFFER_DESTROY:
			DE_ASSERT(state.stage == STAGE_HOST);
			DE_ASSERT(state.hasBuffer);
			DE_ASSERT(state.hasBoundBufferMemory);

			state.hasBuffer = false;
			state.hasBoundBufferMemory = false;
			break;

		case OP_BUFFER_BINDMEMORY:
			DE_ASSERT(state.stage == STAGE_HOST);
			DE_ASSERT(state.hasBuffer);
			DE_ASSERT(!state.hasBoundBufferMemory);

			state.hasBoundBufferMemory = true;
			break;

		case OP_IMAGE_CREATE:
			DE_ASSERT(state.stage == STAGE_HOST);
			DE_ASSERT(!state.hasImage);
			// Buffer and image are mutually exclusive in these tests.
			DE_ASSERT(!state.hasBuffer);

			state.hasImage = true;
			break;

		case OP_IMAGE_DESTROY:
			DE_ASSERT(state.stage == STAGE_HOST);
			DE_ASSERT(state.hasImage);
			DE_ASSERT(state.hasBoundImageMemory);

			state.hasImage = false;
			state.hasBoundImageMemory = false;
			state.imageLayout = vk::VK_IMAGE_LAYOUT_UNDEFINED;
			state.imageDefined = false;
			break;

		case OP_IMAGE_BINDMEMORY:
			DE_ASSERT(state.stage == STAGE_HOST);
			DE_ASSERT(state.hasImage);
			DE_ASSERT(!state.hasBoundImageMemory);

			state.hasBoundImageMemory = true;
			break;

		case OP_IMAGE_TRANSITION_LAYOUT:
		{
			DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
			DE_ASSERT(state.hasImage);
			DE_ASSERT(state.hasBoundImageMemory);

			// \todo [2016-03-09 mika] Support linear tiling and predefined data
			// These rng draws must match createCmdCommand() for the same op.
			const vk::VkImageLayout		srcLayout	= state.rng.getFloat() < 0.9f ? state.imageLayout : vk::VK_IMAGE_LAYOUT_UNDEFINED;
			const vk::VkImageLayout		dstLayout	= getRandomNextLayout(state.rng, usage, srcLayout);

			vk::VkPipelineStageFlags	dirtySrcStages;
			vk::VkAccessFlags			dirtySrcAccesses;
			vk::VkPipelineStageFlags	dirtyDstStages;
			vk::VkAccessFlags			dirtyDstAccesses;

			vk::VkPipelineStageFlags	srcStages;
			vk::VkAccessFlags			srcAccesses;
			vk::VkPipelineStageFlags	dstStages;
			vk::VkAccessFlags			dstAccesses;

			state.cache.getFullBarrier(dirtySrcStages, dirtySrcAccesses, dirtyDstStages, dirtyDstAccesses);

			// Try masking some random bits
			srcStages	= dirtySrcStages;
			srcAccesses	= dirtySrcAccesses;

			dstStages	= state.cache.getAllowedStages() & state.rng.getUint32();
			dstAccesses	= state.cache.getAllowedAcceses() & state.rng.getUint32();

			// If there are no bits in dst stage mask use all stages
			dstStages	= dstStages ? dstStages : state.cache.getAllowedStages();

			if (!srcStages)
				srcStages = dstStages;

			// Transitioning away from UNDEFINED discards the image contents.
			if (srcLayout == vk::VK_IMAGE_LAYOUT_UNDEFINED)
				state.imageDefined = false;

			state.commandBufferIsEmpty = false;
			state.imageLayout = dstLayout;
			// The layout transition invalidates the raw memory interpretation.
			state.memoryDefined = false;
			state.cache.imageLayoutBarrier(srcStages, srcAccesses, dstStages, dstAccesses);
			break;
		}

		case OP_QUEUE_WAIT_FOR_IDLE:
			DE_ASSERT(state.stage == STAGE_HOST);
			DE_ASSERT(!state.queueIdle);

			state.queueIdle = true;

			state.cache.waitForIdle();
			break;

		case OP_DEVICE_WAIT_FOR_IDLE:
			DE_ASSERT(state.stage == STAGE_HOST);
			DE_ASSERT(!state.deviceIdle);

			// Device idle implies queue idle as well.
			state.queueIdle = true;
			state.deviceIdle = true;

			state.cache.waitForIdle();
			break;

		case OP_COMMAND_BUFFER_BEGIN:
			DE_ASSERT(state.stage == STAGE_HOST);
			state.stage = STAGE_COMMAND_BUFFER;
			state.commandBufferIsEmpty = true;
			// Makes host writes visible to command buffer
			state.cache.submitCommandBuffer();
			break;

		case OP_COMMAND_BUFFER_END:
			DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
			state.stage = STAGE_HOST;
			// Submitting work makes the queue and device busy again.
			state.queueIdle = false;
			state.deviceIdle = false;
			break;

		case OP_BUFFER_COPY_FROM_BUFFER:
		case OP_BUFFER_COPY_FROM_IMAGE:
		case OP_BUFFER_UPDATE:
		case OP_BUFFER_FILL:
			// Mirrors the rng draw used when the command object is created.
			state.rng.getUint32();
			DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);

			// Device writes to non-coherent memory require a host invalidate
			// before the host may read the data.
			if ((memory.getMemoryType().propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
				state.hostInvalidated = false;

			state.commandBufferIsEmpty = false;
			state.memoryDefined = true;
			state.imageDefined = false;
			state.imageLayout = vk::VK_IMAGE_LAYOUT_UNDEFINED;
			state.cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT);
			break;

		case OP_BUFFER_COPY_TO_BUFFER:
		case OP_BUFFER_COPY_TO_IMAGE:
			DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);

			state.commandBufferIsEmpty = false;
			state.cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT);
			break;

		case OP_IMAGE_BLIT_FROM_IMAGE:
			// Mirrors the blit-scale draw in createCmdCommand().
			state.rng.getBool();
			// Fall through
		case OP_IMAGE_COPY_FROM_BUFFER:
		case OP_IMAGE_COPY_FROM_IMAGE:
			state.rng.getUint32();
			DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);

			if ((memory.getMemoryType().propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
				state.hostInvalidated = false;

			state.commandBufferIsEmpty = false;
			// Writing through the image view defines the image contents but
			// invalidates the raw memory interpretation.
			state.memoryDefined = false;
			state.imageDefined = true;
			state.cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT);
			break;

		case OP_IMAGE_BLIT_TO_IMAGE:
			// Mirrors the blit-scale draw in createCmdCommand().
			state.rng.getBool();
			// Fall through
		case OP_IMAGE_COPY_TO_BUFFER:
		case OP_IMAGE_COPY_TO_IMAGE:
			DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);

			state.commandBufferIsEmpty = false;
			state.cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT);
			break;

		case OP_PIPELINE_BARRIER_GLOBAL:
		case OP_PIPELINE_BARRIER_BUFFER:
		case OP_PIPELINE_BARRIER_IMAGE:
		{
			DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);

			vk::VkPipelineStageFlags	dirtySrcStages;
			vk::VkAccessFlags			dirtySrcAccesses;
			vk::VkPipelineStageFlags	dirtyDstStages;
			vk::VkAccessFlags			dirtyDstAccesses;

			vk::VkPipelineStageFlags	srcStages;
			vk::VkAccessFlags			srcAccesses;
			vk::VkPipelineStageFlags	dstStages;
			vk::VkAccessFlags			dstAccesses;

			state.cache.getFullBarrier(dirtySrcStages, dirtySrcAccesses, dirtyDstStages, dirtyDstAccesses);

			// Try masking some random bits
			srcStages	= dirtySrcStages & state.rng.getUint32();
			srcAccesses	= dirtySrcAccesses & state.rng.getUint32();

			dstStages	= dirtyDstStages & state.rng.getUint32();
			dstAccesses	= dirtyDstAccesses & state.rng.getUint32();

			// If there are no bits in stage mask use the original dirty stages
			srcStages	= srcStages ? srcStages : dirtySrcStages;
			dstStages	= dstStages ? dstStages : dirtyDstStages;

			if (!srcStages)
				srcStages = dstStages;

			state.commandBufferIsEmpty = false;
			state.cache.barrier(srcStages, srcAccesses, dstStages, dstAccesses);
			break;
		}

		case OP_RENDERPASS_BEGIN:
		{
			DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);

			state.stage = STAGE_RENDER_PASS;
			break;
		}

		case OP_RENDERPASS_END:
		{
			DE_ASSERT(state.stage == STAGE_RENDER_PASS);

			state.stage = STAGE_COMMAND_BUFFER;
			break;
		}

		case OP_RENDER_VERTEX_BUFFER:
		{
			DE_ASSERT(state.stage == STAGE_RENDER_PASS);

			state.cache.perform(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT);
			break;
		}

		case OP_RENDER_INDEX_BUFFER:
		{
			DE_ASSERT(state.stage == STAGE_RENDER_PASS);

			state.cache.perform(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_INDEX_READ_BIT);
			break;
		}

		default:
			DE_FATAL("Unknown op");
	}
}
6108
6109de::MovePtr<Command> createHostCommand (Op					op,
6110										de::Random&			rng,
6111										Usage				usage,
6112										vk::VkSharingMode	sharing)
6113{
6114	switch (op)
6115	{
6116		case OP_MAP:					return de::MovePtr<Command>(new Map());
6117		case OP_UNMAP:					return de::MovePtr<Command>(new UnMap());
6118
6119		case OP_MAP_FLUSH:				return de::MovePtr<Command>(new Flush());
6120		case OP_MAP_INVALIDATE:			return de::MovePtr<Command>(new Invalidate());
6121
6122		case OP_MAP_READ:				return de::MovePtr<Command>(new HostMemoryAccess(true, false, rng.getUint32()));
6123		case OP_MAP_WRITE:				return de::MovePtr<Command>(new HostMemoryAccess(false, true, rng.getUint32()));
6124		case OP_MAP_MODIFY:				return de::MovePtr<Command>(new HostMemoryAccess(true, true, rng.getUint32()));
6125
6126		case OP_BUFFER_CREATE:			return de::MovePtr<Command>(new CreateBuffer(usageToBufferUsageFlags(usage), sharing));
6127		case OP_BUFFER_DESTROY:			return de::MovePtr<Command>(new DestroyBuffer());
6128		case OP_BUFFER_BINDMEMORY:		return de::MovePtr<Command>(new BindBufferMemory());
6129
6130		case OP_IMAGE_CREATE:			return de::MovePtr<Command>(new CreateImage(usageToImageUsageFlags(usage), sharing));
6131		case OP_IMAGE_DESTROY:			return de::MovePtr<Command>(new DestroyImage());
6132		case OP_IMAGE_BINDMEMORY:		return de::MovePtr<Command>(new BindImageMemory());
6133
6134		case OP_QUEUE_WAIT_FOR_IDLE:	return de::MovePtr<Command>(new QueueWaitIdle());
6135		case OP_DEVICE_WAIT_FOR_IDLE:	return de::MovePtr<Command>(new DeviceWaitIdle());
6136
6137		default:
6138			DE_FATAL("Unknown op");
6139			return de::MovePtr<Command>(DE_NULL);
6140	}
6141}
6142
// Construct the CmdCommand object for command-buffer level operation 'op'.
// Every rng draw performed here must exactly mirror the draws that
// applyOp() makes for the same op, since the caller verifies that the
// state tracker's generator stays in sync with 'rng'.
de::MovePtr<CmdCommand> createCmdCommand (de::Random&	rng,
										  const State&	state,
										  Op			op,
										  Usage			usage)
{
	switch (op)
	{
		case OP_BUFFER_FILL:					return de::MovePtr<CmdCommand>(new FillBuffer(rng.getUint32()));
		case OP_BUFFER_UPDATE:					return de::MovePtr<CmdCommand>(new UpdateBuffer(rng.getUint32()));
		case OP_BUFFER_COPY_TO_BUFFER:			return de::MovePtr<CmdCommand>(new BufferCopyToBuffer());
		case OP_BUFFER_COPY_FROM_BUFFER:		return de::MovePtr<CmdCommand>(new BufferCopyFromBuffer(rng.getUint32()));

		case OP_BUFFER_COPY_TO_IMAGE:			return de::MovePtr<CmdCommand>(new BufferCopyToImage());
		case OP_BUFFER_COPY_FROM_IMAGE:			return de::MovePtr<CmdCommand>(new BufferCopyFromImage(rng.getUint32()));

		case OP_IMAGE_TRANSITION_LAYOUT:
		{
			DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
			DE_ASSERT(state.hasImage);
			DE_ASSERT(state.hasBoundImageMemory);

			// Mostly transition from the current layout; occasionally from
			// UNDEFINED, which discards the image contents.
			const vk::VkImageLayout		srcLayout	= rng.getFloat() < 0.9f ? state.imageLayout : vk::VK_IMAGE_LAYOUT_UNDEFINED;
			const vk::VkImageLayout		dstLayout	= getRandomNextLayout(rng, usage, srcLayout);

			vk::VkPipelineStageFlags	dirtySrcStages;
			vk::VkAccessFlags			dirtySrcAccesses;
			vk::VkPipelineStageFlags	dirtyDstStages;
			vk::VkAccessFlags			dirtyDstAccesses;

			vk::VkPipelineStageFlags	srcStages;
			vk::VkAccessFlags			srcAccesses;
			vk::VkPipelineStageFlags	dstStages;
			vk::VkAccessFlags			dstAccesses;

			state.cache.getFullBarrier(dirtySrcStages, dirtySrcAccesses, dirtyDstStages, dirtyDstAccesses);

			// Try masking some random bits
			srcStages	= dirtySrcStages;
			srcAccesses	= dirtySrcAccesses;

			dstStages	= state.cache.getAllowedStages() & rng.getUint32();
			dstAccesses	= state.cache.getAllowedAcceses() & rng.getUint32();

			// If there are no bits in dst stage mask use all stages
			dstStages	= dstStages ? dstStages : state.cache.getAllowedStages();

			// Stage masks must not be empty.
			if (!srcStages)
				srcStages = dstStages;

			return de::MovePtr<CmdCommand>(new ImageTransition(srcStages, srcAccesses, dstStages, dstAccesses, srcLayout, dstLayout));
		}

		case OP_IMAGE_COPY_TO_BUFFER:			return de::MovePtr<CmdCommand>(new ImageCopyToBuffer(state.imageLayout));
		case OP_IMAGE_COPY_FROM_BUFFER:			return de::MovePtr<CmdCommand>(new ImageCopyFromBuffer(rng.getUint32(), state.imageLayout));
		case OP_IMAGE_COPY_TO_IMAGE:			return de::MovePtr<CmdCommand>(new ImageCopyToImage(state.imageLayout));
		case OP_IMAGE_COPY_FROM_IMAGE:			return de::MovePtr<CmdCommand>(new ImageCopyFromImage(rng.getUint32(), state.imageLayout));
		case OP_IMAGE_BLIT_TO_IMAGE:
		{
			// Randomly pick between 2x scaling and 1:1 blit.
			const BlitScale scale = rng.getBool() ? BLIT_SCALE_20 : BLIT_SCALE_10;
			return de::MovePtr<CmdCommand>(new ImageBlitToImage(scale, state.imageLayout));
		}

		case OP_IMAGE_BLIT_FROM_IMAGE:
		{
			const BlitScale scale = rng.getBool() ? BLIT_SCALE_20 : BLIT_SCALE_10;
			return de::MovePtr<CmdCommand>(new ImageBlitFromImage(rng.getUint32(), scale, state.imageLayout));
		}

		case OP_PIPELINE_BARRIER_GLOBAL:
		case OP_PIPELINE_BARRIER_BUFFER:
		case OP_PIPELINE_BARRIER_IMAGE:
		{
			vk::VkPipelineStageFlags	dirtySrcStages;
			vk::VkAccessFlags			dirtySrcAccesses;
			vk::VkPipelineStageFlags	dirtyDstStages;
			vk::VkAccessFlags			dirtyDstAccesses;

			vk::VkPipelineStageFlags	srcStages;
			vk::VkAccessFlags			srcAccesses;
			vk::VkPipelineStageFlags	dstStages;
			vk::VkAccessFlags			dstAccesses;

			state.cache.getFullBarrier(dirtySrcStages, dirtySrcAccesses, dirtyDstStages, dirtyDstAccesses);

			// Try masking some random bits
			srcStages	= dirtySrcStages & rng.getUint32();
			srcAccesses	= dirtySrcAccesses & rng.getUint32();

			dstStages	= dirtyDstStages & rng.getUint32();
			dstAccesses	= dirtyDstAccesses & rng.getUint32();

			// If there are no bits in stage mask use the original dirty stages
			srcStages	= srcStages ? srcStages : dirtySrcStages;
			dstStages	= dstStages ? dstStages : dirtyDstStages;

			if (!srcStages)
				srcStages = dstStages;

			PipelineBarrier::Type type;

			if (op == OP_PIPELINE_BARRIER_IMAGE)
				type = PipelineBarrier::TYPE_IMAGE;
			else if (op == OP_PIPELINE_BARRIER_BUFFER)
				type = PipelineBarrier::TYPE_BUFFER;
			else if (op == OP_PIPELINE_BARRIER_GLOBAL)
				type = PipelineBarrier::TYPE_GLOBAL;
			else
			{
				type = PipelineBarrier::TYPE_LAST;
				DE_FATAL("Unknown op");
			}

			// Image barriers additionally carry the current layout.
			if (type == PipelineBarrier::TYPE_IMAGE)
				return de::MovePtr<CmdCommand>(new PipelineBarrier(srcStages, srcAccesses, dstStages, dstAccesses, type, tcu::just(state.imageLayout)));
			else
				return de::MovePtr<CmdCommand>(new PipelineBarrier(srcStages, srcAccesses, dstStages, dstAccesses, type, tcu::nothing<vk::VkImageLayout>()));
		}

		default:
			DE_FATAL("Unknown op");
			return de::MovePtr<CmdCommand>(DE_NULL);
	}
}
6266
6267de::MovePtr<RenderPassCommand> createRenderPassCommand (de::Random&,
6268														const State&,
6269														Op				op)
6270{
6271	switch (op)
6272	{
6273		case OP_RENDER_VERTEX_BUFFER:	return de::MovePtr<RenderPassCommand>(new RenderBuffer(RenderBuffer::RENDERAS_VERTEX_BUFFER));
6274		case OP_RENDER_INDEX_BUFFER:	return de::MovePtr<RenderPassCommand>(new RenderBuffer(RenderBuffer::RENDERAS_INDEX_BUFFER));
6275
6276		default:
6277			DE_FATAL("Unknown op");
6278			return de::MovePtr<RenderPassCommand>(DE_NULL);
6279	}
6280}
6281
6282de::MovePtr<CmdCommand> createRenderPassCommands (const Memory&	memory,
6283												  de::Random&	nextOpRng,
6284												  State&		state,
6285												  Usage			usage,
6286												  size_t&		opNdx,
6287												  size_t		opCount)
6288{
6289	vector<RenderPassCommand*>	commands;
6290
6291	try
6292	{
6293		for (; opNdx < opCount; opNdx++)
6294		{
6295			vector<Op>	ops;
6296
6297			getAvailableOps(state, memory.getSupportBuffers(), memory.getSupportImages(), usage, ops);
6298
6299			DE_ASSERT(!ops.empty());
6300
6301			{
6302				const Op op = nextOpRng.choose<Op>(ops.begin(), ops.end());
6303
6304				if (op == OP_RENDERPASS_END)
6305				{
6306					break;
6307				}
6308				else
6309				{
6310					de::Random	rng	(state.rng);
6311
6312					commands.push_back(createRenderPassCommand(rng, state, op).release());
6313					applyOp(state, memory, op, usage);
6314
6315					DE_ASSERT(state.rng == rng);
6316				}
6317			}
6318		}
6319
6320		applyOp(state, memory, OP_RENDERPASS_END, usage);
6321		return de::MovePtr<CmdCommand>(new SubmitRenderPass(commands));
6322	}
6323	catch (...)
6324	{
6325		for (size_t commandNdx = 0; commandNdx < commands.size(); commandNdx++)
6326			delete commands[commandNdx];
6327
6328		throw;
6329	}
6330}
6331
6332de::MovePtr<Command> createCmdCommands (const Memory&	memory,
6333										de::Random&		nextOpRng,
6334										State&			state,
6335										Usage			usage,
6336										size_t&			opNdx,
6337										size_t			opCount)
6338{
6339	vector<CmdCommand*>	commands;
6340
6341	try
6342	{
6343		for (; opNdx < opCount; opNdx++)
6344		{
6345			vector<Op>	ops;
6346
6347			getAvailableOps(state, memory.getSupportBuffers(), memory.getSupportImages(), usage, ops);
6348
6349			DE_ASSERT(!ops.empty());
6350
6351			{
6352				const Op op = nextOpRng.choose<Op>(ops.begin(), ops.end());
6353
6354				if (op == OP_COMMAND_BUFFER_END)
6355				{
6356					break;
6357				}
6358				else
6359				{
6360					// \note Command needs to known the state before the operation
6361					if (op == OP_RENDERPASS_BEGIN)
6362					{
6363						applyOp(state, memory, op, usage);
6364						commands.push_back(createRenderPassCommands(memory, nextOpRng, state, usage, opNdx, opCount).release());
6365					}
6366					else
6367					{
6368						de::Random	rng	(state.rng);
6369
6370						commands.push_back(createCmdCommand(rng, state, op, usage).release());
6371						applyOp(state, memory, op, usage);
6372
6373						DE_ASSERT(state.rng == rng);
6374					}
6375
6376				}
6377			}
6378		}
6379
6380		applyOp(state, memory, OP_COMMAND_BUFFER_END, usage);
6381		return de::MovePtr<Command>(new SubmitCommandBuffer(commands));
6382	}
6383	catch (...)
6384	{
6385		for (size_t commandNdx = 0; commandNdx < commands.size(); commandNdx++)
6386			delete commands[commandNdx];
6387
6388		throw;
6389	}
6390}
6391
6392void createCommands (vector<Command*>&			commands,
6393					 deUint32					seed,
6394					 const Memory&				memory,
6395					 Usage						usage,
6396					 vk::VkSharingMode			sharingMode)
6397{
6398	const size_t		opCount		= 100;
6399	State				state		(usage, seed);
6400	// Used to select next operation only
6401	de::Random			nextOpRng	(seed ^ 12930809);
6402
6403	commands.reserve(opCount);
6404
6405	for (size_t opNdx = 0; opNdx < opCount; opNdx++)
6406	{
6407		vector<Op>	ops;
6408
6409		getAvailableOps(state, memory.getSupportBuffers(), memory.getSupportImages(), usage, ops);
6410
6411		DE_ASSERT(!ops.empty());
6412
6413		{
6414			const Op	op	= nextOpRng.choose<Op>(ops.begin(), ops.end());
6415
6416			if (op == OP_COMMAND_BUFFER_BEGIN)
6417			{
6418				applyOp(state, memory, op, usage);
6419				commands.push_back(createCmdCommands(memory, nextOpRng, state, usage, opNdx, opCount).release());
6420			}
6421			else
6422			{
6423				de::Random	rng	(state.rng);
6424
6425				commands.push_back(createHostCommand(op, rng, usage, sharingMode).release());
6426				applyOp(state, memory, op, usage);
6427
6428				// Make sure that random generator is in sync
6429				DE_ASSERT(state.rng == rng);
6430			}
6431		}
6432	}
6433
6434	// Clean up resources
6435	if (state.hasBuffer && state.hasImage)
6436	{
6437		if (!state.queueIdle)
6438			commands.push_back(new QueueWaitIdle());
6439
6440		if (state.hasBuffer)
6441			commands.push_back(new DestroyBuffer());
6442
6443		if (state.hasImage)
6444			commands.push_back(new DestroyImage());
6445	}
6446}
6447
// Run one generated command sequence against a single memory type: create
// the commands, log them, then run the prepare, execute and verify phases.
// Failures are recorded into 'resultCollector'; the raw command pointers
// are deleted on both the success and every exception path.
void testCommand (TestLog&											log,
				  tcu::ResultCollector&								resultCollector,
				  const vk::ProgramCollection<vk::ProgramBinary>&	binaryCollection,
				  const vk::InstanceInterface&						vki,
				  const vk::DeviceInterface&						vkd,
				  vk::VkPhysicalDevice								physicalDevice,
				  vk::VkDevice										device,
				  vk::VkDeviceSize									size,
				  deUint32											memoryTypeIndex,
				  Usage												usage,
				  vk::VkSharingMode									sharingMode,
				  vk::VkQueue										executionQueue,
				  deUint32											executionQueueFamily,
				  const vector<deUint32>&							queueFamilies,
				  const vk::VkDeviceSize							maxBufferSize,
				  const IVec2										maxImageSize)
{
	// Fixed seed so every run of the same test case generates the same sequence.
	const deUint32							seed			= 2830980989u;
	Memory									memory			(vki, vkd, physicalDevice, device, size, memoryTypeIndex, maxBufferSize, maxImageSize[0], maxImageSize[1]);
	vector<Command*>						commands;
	vector<pair<deUint32, vk::VkQueue> >	queues;

	try
	{
		log << TestLog::Message << "Create commands" << TestLog::EndMessage;
		createCommands(commands, seed, memory, usage, sharingMode);

		// Fetch queue 0 of each requested family for the execution context.
		for (size_t queueNdx = 0; queueNdx < queueFamilies.size(); queueNdx++)
		{
			vk::VkQueue queue;

			vkd.getDeviceQueue(device, queueFamilies[queueNdx], 0, &queue);
			queues.push_back(std::make_pair(queueFamilies[queueNdx], queue));
		}

		// Log the whole sequence up front so a failure log contains the full plan.
		{
			const tcu::ScopedLogSection section (log, "LogPrepare", "LogPrepare");

			for (size_t cmdNdx = 0; cmdNdx < commands.size(); cmdNdx++)
				commands[cmdNdx]->logPrepare(log, cmdNdx);
		}

		{
			const tcu::ScopedLogSection section (log, "LogExecute", "LogExecute");

			for (size_t cmdNdx = 0; cmdNdx < commands.size(); cmdNdx++)
				commands[cmdNdx]->logExecute(log, cmdNdx);
		}

		{
			const Context context (vki, vkd, physicalDevice, device, executionQueue, executionQueueFamily, queues, binaryCollection);

			try
			{
				{
					PrepareContext	prepareContext	(context, memory);

					log << TestLog::Message << "Begin prepare" << TestLog::EndMessage;

					for (size_t cmdNdx = 0; cmdNdx < commands.size(); cmdNdx++)
					{
						Command& command = *commands[cmdNdx];

						try
						{
							command.prepare(prepareContext);
						}
						catch (...)
						{
							resultCollector.fail(de::toString(cmdNdx) + ":" + command.getName() + " failed to prepare for execution");
							throw;
						}
					}

					ExecuteContext	executeContext	(context);

					log << TestLog::Message << "Begin execution" << TestLog::EndMessage;

					for (size_t cmdNdx = 0; cmdNdx < commands.size(); cmdNdx++)
					{
						Command& command = *commands[cmdNdx];

						try
						{
							command.execute(executeContext);
						}
						catch (...)
						{
							resultCollector.fail(de::toString(cmdNdx) + ":" + command.getName() + " failed to execute");
							throw;
						}
					}

					// All submitted work must finish before verification reads results.
					VK_CHECK(vkd.deviceWaitIdle(device));
				}

				{
					const tcu::ScopedLogSection	section			(log, "Verify", "Verify");
					VerifyContext				verifyContext	(log, resultCollector, context, size);

					log << TestLog::Message << "Begin verify" << TestLog::EndMessage;

					for (size_t cmdNdx = 0; cmdNdx < commands.size(); cmdNdx++)
					{
						Command& command = *commands[cmdNdx];

						try
						{
							command.verify(verifyContext, cmdNdx);
						}
						catch (...)
						{
							resultCollector.fail(de::toString(cmdNdx) + ":" + command.getName() + " failed verification");
							throw;
						}
					}
				}

				// Success path cleanup; entries are nulled so the outer catch
				// blocks can safely delete the vector again after a rethrow.
				for (size_t commandNdx = 0; commandNdx < commands.size(); commandNdx++)
				{
					delete commands[commandNdx];
					commands[commandNdx] = DE_NULL;
				}
			}
			catch (...)
			{
				// Deleting DE_NULL entries is safe, so double cleanup from
				// nested catch blocks is harmless.
				for (size_t commandNdx = 0; commandNdx < commands.size(); commandNdx++)
				{
					delete commands[commandNdx];
					commands[commandNdx] = DE_NULL;
				}

				throw;
			}
		}
	}
	catch (...)
	{
		for (size_t commandNdx = 0; commandNdx < commands.size(); commandNdx++)
		{
			delete commands[commandNdx];
			commands[commandNdx] = DE_NULL;
		}

		throw;
	}
}
// Test instance that runs the configured pipeline barrier commands once per
// memory type reported by the physical device. iterate() handles a single
// memory type per call and returns incomplete() until all types are done.
class MemoryTestInstance : public TestInstance
{
public:

						// Logs the test configuration and the device's memory heaps/types.
						MemoryTestInstance	(::vkt::Context& context, const TestConfig& config);

	// Tests the memory type at m_memoryTypeNdx (skipping unsupported ones),
	// then advances the index; returns the collected result once all memory
	// types have been processed.
	tcu::TestStatus		iterate				(void);

private:
	const TestConfig							m_config;			// Buffer size, usage flags and sharing mode under test.
	const vk::VkPhysicalDeviceMemoryProperties	m_memoryProperties;	// Memory properties cached at construction time.
	deUint32									m_memoryTypeNdx;	// Index of the next memory type to test; advanced by iterate().
	tcu::ResultCollector						m_resultCollector;	// Aggregates pass/fail results across all memory types.
};
6609
6610MemoryTestInstance::MemoryTestInstance (::vkt::Context& context, const TestConfig& config)
6611	: TestInstance			(context)
6612	, m_config				(config)
6613	, m_memoryProperties	(vk::getPhysicalDeviceMemoryProperties(context.getInstanceInterface(), context.getPhysicalDevice()))
6614	, m_memoryTypeNdx		(0)
6615	, m_resultCollector		(context.getTestContext().getLog())
6616{
6617	TestLog&	log		= context.getTestContext().getLog();
6618	{
6619		const tcu::ScopedLogSection section (log, "TestCaseInfo", "Test Case Info");
6620
6621		log << TestLog::Message << "Buffer size: " << config.size << TestLog::EndMessage;
6622		log << TestLog::Message << "Sharing: " << config.sharing << TestLog::EndMessage;
6623		log << TestLog::Message << "Access: " << config.usage << TestLog::EndMessage;
6624	}
6625
6626	{
6627		const tcu::ScopedLogSection section (log, "MemoryProperties", "Memory Properties");
6628
6629		for (deUint32 heapNdx = 0; heapNdx < m_memoryProperties.memoryHeapCount; heapNdx++)
6630		{
6631			const tcu::ScopedLogSection heapSection (log, "Heap" + de::toString(heapNdx), "Heap " + de::toString(heapNdx));
6632
6633			log << TestLog::Message << "Size: " << m_memoryProperties.memoryHeaps[heapNdx].size << TestLog::EndMessage;
6634			log << TestLog::Message << "Flags: " << m_memoryProperties.memoryHeaps[heapNdx].flags << TestLog::EndMessage;
6635		}
6636
6637		for (deUint32 memoryTypeNdx = 0; memoryTypeNdx < m_memoryProperties.memoryTypeCount; memoryTypeNdx++)
6638		{
6639			const tcu::ScopedLogSection memoryTypeSection (log, "MemoryType" + de::toString(memoryTypeNdx), "Memory type " + de::toString(memoryTypeNdx));
6640
6641			log << TestLog::Message << "Properties: " << m_memoryProperties.memoryTypes[memoryTypeNdx].propertyFlags << TestLog::EndMessage;
6642			log << TestLog::Message << "Heap: " << m_memoryProperties.memoryTypes[memoryTypeNdx].heapIndex << TestLog::EndMessage;
6643		}
6644	}
6645}
6646
6647tcu::TestStatus MemoryTestInstance::iterate (void)
6648{
6649	// \todo [2016-03-09 mika] Split different stages over multiple iterations
6650	if (m_memoryTypeNdx < m_memoryProperties.memoryTypeCount)
6651	{
6652		TestLog&									log					= m_context.getTestContext().getLog();
6653		const tcu::ScopedLogSection					section				(log, "MemoryType" + de::toString(m_memoryTypeNdx), "Memory type " + de::toString(m_memoryTypeNdx));
6654		const vk::InstanceInterface&				vki					= m_context.getInstanceInterface();
6655		const vk::VkPhysicalDevice					physicalDevice		= m_context.getPhysicalDevice();
6656		const vk::DeviceInterface&					vkd					= m_context.getDeviceInterface();
6657		const vk::VkDevice							device				= m_context.getDevice();
6658		const vk::VkQueue							queue				= m_context.getUniversalQueue();
6659		const deUint32								queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
6660		const vk::VkPhysicalDeviceMemoryProperties	memoryProperties	= vk::getPhysicalDeviceMemoryProperties(vki, physicalDevice);
6661		vector<deUint32>							queues;
6662
6663		queues.push_back(queueFamilyIndex);
6664
6665		if (m_config.usage & (USAGE_HOST_READ|USAGE_HOST_WRITE)
6666			&& !(memoryProperties.memoryTypes[m_memoryTypeNdx].propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT))
6667		{
6668			log << TestLog::Message << "Memory type not supported" << TestLog::EndMessage;
6669
6670			m_memoryTypeNdx++;
6671			return tcu::TestStatus::incomplete();
6672		}
6673		else
6674		{
6675			try
6676			{
6677				const vk::VkBufferUsageFlags	bufferUsage		= usageToBufferUsageFlags(m_config.usage);
6678				const vk::VkImageUsageFlags		imageUsage		= usageToImageUsageFlags(m_config.usage);
6679				const vk::VkDeviceSize			maxBufferSize	= bufferUsage != 0
6680																? roundBufferSizeToWxHx4(findMaxBufferSize(vkd, device, bufferUsage, m_config.sharing, queues, m_config.size, m_memoryTypeNdx))
6681																: 0;
6682				const IVec2						maxImageSize	= imageUsage != 0
6683																? findMaxRGBA8ImageSize(vkd, device, imageUsage, m_config.sharing, queues, m_config.size, m_memoryTypeNdx)
6684																: IVec2(0, 0);
6685
6686				log << TestLog::Message << "Max buffer size: " << maxBufferSize << TestLog::EndMessage;
6687				log << TestLog::Message << "Max RGBA8 image size: " << maxImageSize << TestLog::EndMessage;
6688
6689				// Skip tests if there are no supported operations
6690				if (maxBufferSize == 0
6691					&& maxImageSize[0] == 0
6692					&& (m_config.usage & (USAGE_HOST_READ|USAGE_HOST_WRITE)) == 0)
6693				{
6694					log << TestLog::Message << "Skipping memory type. None of the usages are supported." << TestLog::EndMessage;
6695				}
6696				else
6697				{
6698					testCommand(log, m_resultCollector, m_context.getBinaryCollection(), vki, vkd, physicalDevice, device, m_config.size, m_memoryTypeNdx, m_config.usage, m_config.sharing, queue, queueFamilyIndex, queues, maxBufferSize, maxImageSize);
6699				}
6700			}
6701			catch (const tcu::TestError& e)
6702			{
6703				m_resultCollector.fail("Failed, got exception: " + string(e.getMessage()));
6704			}
6705
6706			m_memoryTypeNdx++;
6707			return tcu::TestStatus::incomplete();
6708		}
6709	}
6710	else
6711		return tcu::TestStatus(m_resultCollector.getResult(), m_resultCollector.getMessage());
6712}
6713
6714struct AddPrograms
6715{
6716	void init (vk::SourceCollections& sources, TestConfig config) const
6717	{
6718		// Vertex buffer rendering
6719		if (config.usage & USAGE_VERTEX_BUFFER)
6720		{
6721			const char* const vertexShader =
6722				"#version 310 es\n"
6723				"layout(location = 0) in highp vec2 a_position;\n"
6724				"void main (void) {\n"
6725				"\tgl_PointSize = 1.0;\n"
6726				"\tgl_Position = vec4(1.998 * a_position - vec2(0.999), 0.0, 1.0);\n"
6727				"}\n";
6728
6729			sources.glslSources.add("vertex-buffer.vert")
6730				<< glu::VertexSource(vertexShader);
6731		}
6732
6733		// Index buffer rendering
6734		if (config.usage & USAGE_INDEX_BUFFER)
6735		{
6736			const char* const vertexShader =
6737				"#version 310 es\n"
6738				"highp float;\n"
6739				"void main (void) {\n"
6740				"\tgl_PointSize = 1.0;\n"
6741				"\thighp vec2 pos = vec2(gl_VertexIndex % 256, gl_VertexIndex / 256) / vec2(255.0);\n"
6742				"\tgl_Position = vec4(1.998 * pos - vec2(0.999), 0.0, 1.0);\n"
6743				"}\n";
6744
6745			sources.glslSources.add("index-buffer.vert")
6746				<< glu::VertexSource(vertexShader);
6747		}
6748
6749		{
6750			const char* const fragmentShader =
6751				"#version 310 es\n"
6752				"layout(location = 0) out highp vec4 o_color;\n"
6753				"void main (void) {\n"
6754				"\to_color = vec4(1.0);\n"
6755				"}\n";
6756
6757			sources.glslSources.add("render-white.frag")
6758				<< glu::FragmentSource(fragmentShader);
6759		}
6760	}
6761};
6762
6763} // anonymous
6764
6765tcu::TestCaseGroup* createPipelineBarrierTests (tcu::TestContext& testCtx)
6766{
6767	de::MovePtr<tcu::TestCaseGroup>	group			(new tcu::TestCaseGroup(testCtx, "pipeline_barrier", "Pipeline barrier tests."));
6768	const vk::VkDeviceSize			sizes[]			=
6769	{
6770		1024,			// 1K
6771		8*1024,			// 8K
6772		64*1024,		// 64K
6773		1024*1024,		// 1M
6774	};
6775	const Usage						usages[]		=
6776	{
6777		USAGE_HOST_READ,
6778		USAGE_HOST_WRITE,
6779		USAGE_TRANSFER_SRC,
6780		USAGE_TRANSFER_DST,
6781		USAGE_VERTEX_BUFFER,
6782		USAGE_INDEX_BUFFER
6783	};
6784	const Usage						readUsages[]		=
6785	{
6786		USAGE_HOST_READ,
6787		USAGE_TRANSFER_SRC,
6788		USAGE_VERTEX_BUFFER,
6789		USAGE_INDEX_BUFFER
6790	};
6791
6792	const Usage						writeUsages[]	=
6793	{
6794		USAGE_HOST_WRITE,
6795		USAGE_TRANSFER_DST
6796	};
6797
6798	for (size_t writeUsageNdx = 0; writeUsageNdx < DE_LENGTH_OF_ARRAY(writeUsages); writeUsageNdx++)
6799	{
6800		const Usage	writeUsage	= writeUsages[writeUsageNdx];
6801
6802		for (size_t readUsageNdx = 0; readUsageNdx < DE_LENGTH_OF_ARRAY(readUsages); readUsageNdx++)
6803		{
6804			const Usage						readUsage		= readUsages[readUsageNdx];
6805			const Usage						usage			= writeUsage | readUsage;
6806			const string					usageGroupName	(usageToName(usage));
6807			de::MovePtr<tcu::TestCaseGroup>	usageGroup		(new tcu::TestCaseGroup(testCtx, usageGroupName.c_str(), usageGroupName.c_str()));
6808
6809			for (size_t sizeNdx = 0; sizeNdx < DE_LENGTH_OF_ARRAY(sizes); sizeNdx++)
6810			{
6811				const vk::VkDeviceSize	size		= sizes[sizeNdx];
6812				const string			testName	(de::toString((deUint64)(size)));
6813				const TestConfig		config		=
6814				{
6815					usage,
6816					size,
6817					vk::VK_SHARING_MODE_EXCLUSIVE
6818				};
6819
6820				usageGroup->addChild(new InstanceFactory1<MemoryTestInstance, TestConfig, AddPrograms>(testCtx,tcu::NODETYPE_SELF_VALIDATE,  testName, testName, AddPrograms(), config));
6821			}
6822
6823			group->addChild(usageGroup.get());
6824			usageGroup.release();
6825		}
6826	}
6827
6828	{
6829		Usage all = (Usage)0;
6830
6831		for (size_t usageNdx = 0; usageNdx < DE_LENGTH_OF_ARRAY(usages); usageNdx++)
6832			all = all | usages[usageNdx];
6833
6834		{
6835			const string					usageGroupName	("all");
6836			de::MovePtr<tcu::TestCaseGroup>	usageGroup		(new tcu::TestCaseGroup(testCtx, usageGroupName.c_str(), usageGroupName.c_str()));
6837
6838			for (size_t sizeNdx = 0; sizeNdx < DE_LENGTH_OF_ARRAY(sizes); sizeNdx++)
6839			{
6840				const vk::VkDeviceSize	size		= sizes[sizeNdx];
6841				const string			testName	(de::toString((deUint64)(size)));
6842				const TestConfig		config		=
6843				{
6844					all,
6845					size,
6846					vk::VK_SHARING_MODE_EXCLUSIVE
6847				};
6848
6849				usageGroup->addChild(new InstanceFactory1<MemoryTestInstance, TestConfig, AddPrograms>(testCtx,tcu::NODETYPE_SELF_VALIDATE,  testName, testName, AddPrograms(), config));
6850			}
6851
6852			group->addChild(usageGroup.get());
6853			usageGroup.release();
6854		}
6855
6856		{
6857			const string					usageGroupName	("all_device");
6858			de::MovePtr<tcu::TestCaseGroup>	usageGroup		(new tcu::TestCaseGroup(testCtx, usageGroupName.c_str(), usageGroupName.c_str()));
6859
6860			for (size_t sizeNdx = 0; sizeNdx < DE_LENGTH_OF_ARRAY(sizes); sizeNdx++)
6861			{
6862				const vk::VkDeviceSize	size		= sizes[sizeNdx];
6863				const string			testName	(de::toString((deUint64)(size)));
6864				const TestConfig		config		=
6865				{
6866					(Usage)(all & (~(USAGE_HOST_READ|USAGE_HOST_WRITE))),
6867					size,
6868					vk::VK_SHARING_MODE_EXCLUSIVE
6869				};
6870
6871				usageGroup->addChild(new InstanceFactory1<MemoryTestInstance, TestConfig, AddPrograms>(testCtx,tcu::NODETYPE_SELF_VALIDATE,  testName, testName, AddPrograms(), config));
6872			}
6873
6874			group->addChild(usageGroup.get());
6875			usageGroup.release();
6876		}
6877	}
6878
6879	return group.release();
6880}
6881
6882} // memory
6883} // vkt
6884