1/* 2 * Copyright (c) 2015-2017 The Khronos Group Inc. 3 * Copyright (c) 2015-2017 Valve Corporation 4 * Copyright (c) 2015-2017 LunarG, Inc. 5 * Copyright (c) 2015-2017 Google, Inc. 6 * 7 * Licensed under the Apache License, Version 2.0 (the "License"); 8 * you may not use this file except in compliance with the License. 9 * You may obtain a copy of the License at 10 * 11 * http://www.apache.org/licenses/LICENSE-2.0 12 * 13 * Unless required by applicable law or agreed to in writing, software 14 * distributed under the License is distributed on an "AS IS" BASIS, 15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 * See the License for the specific language governing permissions and 17 * limitations under the License. 18 * 19 * Author: Courtney Goeltzenleuchter <courtney@LunarG.com> 20 * Author: Tony Barbour <tony@LunarG.com> 21 * Author: Dave Houlton <daveh@lunarg.com> 22 */ 23 24#include "vkrenderframework.h" 25#include "vk_format_utils.h" 26 27#define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0])) 28#define GET_DEVICE_PROC_ADDR(dev, entrypoint) \ 29 { \ 30 fp##entrypoint = (PFN_vk##entrypoint)vkGetDeviceProcAddr(dev, "vk" #entrypoint); \ 31 assert(fp##entrypoint != NULL); \ 32 } 33 34VkRenderFramework::VkRenderFramework() 35 : inst(VK_NULL_HANDLE), 36 m_device(NULL), 37 m_commandPool(VK_NULL_HANDLE), 38 m_commandBuffer(NULL), 39 m_renderPass(VK_NULL_HANDLE), 40 m_framebuffer(VK_NULL_HANDLE), 41 m_addRenderPassSelfDependency(false), 42 m_width(256.0), // default window width 43 m_height(256.0), // default window height 44 m_render_target_fmt(VK_FORMAT_R8G8B8A8_UNORM), 45 m_depth_stencil_fmt(VK_FORMAT_UNDEFINED), 46 m_clear_via_load_op(true), 47 m_depth_clear_color(1.0), 48 m_stencil_clear_color(0), 49 m_depthStencil(NULL), 50 m_CreateDebugReportCallback(VK_NULL_HANDLE), 51 m_DestroyDebugReportCallback(VK_NULL_HANDLE), 52 m_globalMsgCallback(VK_NULL_HANDLE), 53 m_devMsgCallback(VK_NULL_HANDLE) { 54 memset(&m_renderPassBeginInfo, 0, sizeof(m_renderPassBeginInfo)); 55 m_renderPassBeginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO; 56 57 // clear the back buffer to dark grey 58 m_clear_color.float32[0] = 0.25f; 59 m_clear_color.float32[1] = 0.25f; 60 m_clear_color.float32[2] = 0.25f; 61 m_clear_color.float32[3] = 0.0f; 62} 63 64VkRenderFramework::~VkRenderFramework() {} 65 66VkPhysicalDevice VkRenderFramework::gpu() { 67 EXPECT_NE((VkInstance)0, inst); // Invalid to request gpu before instance exists 68 return objs[0]; 69} 70 71// Return true if layer name is found and spec+implementation values are >= requested values 72bool VkRenderFramework::InstanceLayerSupported(const char *name, uint32_t spec, uint32_t implementation) { 73 uint32_t layer_count = 0; 74 std::vector<VkLayerProperties> layer_props; 75 76 VkResult res = vkEnumerateInstanceLayerProperties(&layer_count, NULL); 77 if (VK_SUCCESS != res) return false; 78 if (0 == layer_count) return false; 79 80 layer_props.resize(layer_count); 81 res = vkEnumerateInstanceLayerProperties(&layer_count, layer_props.data()); 82 if (VK_SUCCESS != res) return false; 83 84 for (auto &it : layer_props) { 85 if (0 == strncmp(name, it.layerName, VK_MAX_EXTENSION_NAME_SIZE)) { 86 return ((it.specVersion >= spec) && (it.implementationVersion >= implementation)); 87 } 88 } 89 return false; 90} 91 92// Enable device profile as last layer on stack overriding devsim if there, or return if not available 93bool VkRenderFramework::EnableDeviceProfileLayer() { 94 if (InstanceLayerSupported("VK_LAYER_LUNARG_device_profile_api")) { 95 
if (VkTestFramework::m_devsim_layer) { 96 assert(0 == strcmp(m_instance_layer_names.back(), "VK_LAYER_LUNARG_device_simulation")); 97 m_instance_layer_names.pop_back(); 98 m_instance_layer_names.push_back("VK_LAYER_LUNARG_device_profile_api"); 99 } else { 100 m_instance_layer_names.push_back("VK_LAYER_LUNARG_device_profile_api"); 101 } 102 } else { 103 printf(" Did not find VK_LAYER_LUNARG_device_profile_api layer; skipped.\n"); 104 return false; 105 } 106 return true; 107} 108 109// Return true if extension name is found and spec value is >= requested spec value 110bool VkRenderFramework::InstanceExtensionSupported(const char *ext_name, uint32_t spec) { 111 uint32_t ext_count = 0; 112 std::vector<VkExtensionProperties> ext_props; 113 VkResult res = vkEnumerateInstanceExtensionProperties(nullptr, &ext_count, nullptr); 114 if (VK_SUCCESS != res) return false; 115 if (0 == ext_count) return false; 116 117 ext_props.resize(ext_count); 118 res = vkEnumerateInstanceExtensionProperties(nullptr, &ext_count, ext_props.data()); 119 if (VK_SUCCESS != res) return false; 120 121 for (auto &it : ext_props) { 122 if (0 == strncmp(ext_name, it.extensionName, VK_MAX_EXTENSION_NAME_SIZE)) { 123 return (it.specVersion >= spec); 124 } 125 } 126 return false; 127} 128 129// Return true if extension name is found and spec value is >= requested spec value 130bool VkRenderFramework::DeviceExtensionSupported(VkPhysicalDevice dev, const char *layer, const char *ext_name, uint32_t spec) { 131 if (!inst) { 132 EXPECT_NE((VkInstance)0, inst); // Complain, not cool without an instance 133 return false; 134 } 135 uint32_t ext_count = 0; 136 std::vector<VkExtensionProperties> ext_props; 137 VkResult res = vkEnumerateDeviceExtensionProperties(dev, layer, &ext_count, nullptr); 138 if (VK_SUCCESS != res) return false; 139 if (0 == ext_count) return false; 140 141 ext_props.resize(ext_count); 142 res = vkEnumerateDeviceExtensionProperties(dev, layer, &ext_count, ext_props.data()); 143 if (VK_SUCCESS != res) return false; 144 145 for (auto &it : ext_props) { 146 if (0 == strncmp(ext_name, it.extensionName, VK_MAX_EXTENSION_NAME_SIZE)) { 147 return (it.specVersion >= spec); 148 } 149 } 150 return false; 151} 152 153void VkRenderFramework::InitFramework(PFN_vkDebugReportCallbackEXT dbgFunction, void *userData) { 154 // Only enable device profile layer by default if devsim is not enabled 155 if (!VkTestFramework::m_devsim_layer && InstanceLayerSupported("VK_LAYER_LUNARG_device_profile_api")) { 156 m_instance_layer_names.push_back("VK_LAYER_LUNARG_device_profile_api"); 157 } 158 159 // Assert not already initialized 160 ASSERT_EQ((VkInstance)0, inst); 161 162 // Remove any unsupported layer names from list 163 for (auto layer = m_instance_layer_names.begin(); layer != m_instance_layer_names.end();) { 164 if (!InstanceLayerSupported(*layer)) { 165 ADD_FAILURE() << "InitFramework(): Requested layer " << *layer << " was not found. Disabled."; 166 layer = m_instance_layer_names.erase(layer); 167 } else { 168 ++layer; 169 } 170 } 171 172 // Remove any unsupported instance extension names from list 173 for (auto ext = m_instance_extension_names.begin(); ext != m_instance_extension_names.end();) { 174 if (!InstanceExtensionSupported(*ext)) { 175 ADD_FAILURE() << "InitFramework(): Requested extension " << *ext << " was not found. 
Disabled."; 176 ext = m_instance_extension_names.erase(ext); 177 } else { 178 ++ext; 179 } 180 } 181 182 VkInstanceCreateInfo instInfo = {}; 183 VkResult U_ASSERT_ONLY err; 184 185 instInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO; 186 instInfo.pNext = NULL; 187 instInfo.pApplicationInfo = &app_info; 188 instInfo.enabledLayerCount = m_instance_layer_names.size(); 189 instInfo.ppEnabledLayerNames = m_instance_layer_names.data(); 190 instInfo.enabledExtensionCount = m_instance_extension_names.size(); 191 instInfo.ppEnabledExtensionNames = m_instance_extension_names.data(); 192 err = vkCreateInstance(&instInfo, NULL, &this->inst); 193 ASSERT_VK_SUCCESS(err); 194 195 err = vkEnumeratePhysicalDevices(inst, &this->gpu_count, NULL); 196 ASSERT_LE(this->gpu_count, ARRAY_SIZE(objs)) << "Too many gpus"; 197 ASSERT_VK_SUCCESS(err); 198 err = vkEnumeratePhysicalDevices(inst, &this->gpu_count, objs); 199 ASSERT_VK_SUCCESS(err); 200 ASSERT_GE(this->gpu_count, (uint32_t)1) << "No GPU available"; 201 if (dbgFunction) { 202 m_CreateDebugReportCallback = 203 (PFN_vkCreateDebugReportCallbackEXT)vkGetInstanceProcAddr(this->inst, "vkCreateDebugReportCallbackEXT"); 204 ASSERT_NE(m_CreateDebugReportCallback, (PFN_vkCreateDebugReportCallbackEXT)NULL) 205 << "Did not get function pointer for CreateDebugReportCallback"; 206 if (m_CreateDebugReportCallback) { 207 VkDebugReportCallbackCreateInfoEXT dbgCreateInfo; 208 memset(&dbgCreateInfo, 0, sizeof(dbgCreateInfo)); 209 dbgCreateInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT; 210 dbgCreateInfo.flags = 211 VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT | VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT; 212 dbgCreateInfo.pfnCallback = dbgFunction; 213 dbgCreateInfo.pUserData = userData; 214 215 err = m_CreateDebugReportCallback(this->inst, &dbgCreateInfo, NULL, &m_globalMsgCallback); 216 ASSERT_VK_SUCCESS(err); 217 218 m_DestroyDebugReportCallback = 219 (PFN_vkDestroyDebugReportCallbackEXT)vkGetInstanceProcAddr(this->inst, "vkDestroyDebugReportCallbackEXT"); 220 ASSERT_NE(m_DestroyDebugReportCallback, (PFN_vkDestroyDebugReportCallbackEXT)NULL) 221 << "Did not get function pointer for DestroyDebugReportCallback"; 222 m_DebugReportMessage = (PFN_vkDebugReportMessageEXT)vkGetInstanceProcAddr(this->inst, "vkDebugReportMessageEXT"); 223 ASSERT_NE(m_DebugReportMessage, (PFN_vkDebugReportMessageEXT)NULL) 224 << "Did not get function pointer for DebugReportMessage"; 225 } 226 } 227} 228 229void VkRenderFramework::ShutdownFramework() { 230 // Nothing to shut down without a VkInstance 231 if (!this->inst) return; 232 233 delete m_commandBuffer; 234 delete m_commandPool; 235 if (m_framebuffer) vkDestroyFramebuffer(device(), m_framebuffer, NULL); 236 if (m_renderPass) vkDestroyRenderPass(device(), m_renderPass, NULL); 237 238 if (m_globalMsgCallback) m_DestroyDebugReportCallback(this->inst, m_globalMsgCallback, NULL); 239 if (m_devMsgCallback) m_DestroyDebugReportCallback(this->inst, m_devMsgCallback, NULL); 240 241 while (!m_renderTargets.empty()) { 242 vkDestroyImageView(device(), m_renderTargets.back()->targetView(m_render_target_fmt), NULL); 243 vkDestroyImage(device(), m_renderTargets.back()->image(), NULL); 244 vkFreeMemory(device(), m_renderTargets.back()->memory(), NULL); 245 m_renderTargets.pop_back(); 246 } 247 248 delete m_depthStencil; 249 250 // reset the driver 251 delete m_device; 252 if (this->inst) vkDestroyInstance(this->inst, NULL); 253 this->inst = (VkInstance)0; // In case we want to re-initialize 254} 255 256void 
VkRenderFramework::GetPhysicalDeviceFeatures(VkPhysicalDeviceFeatures *features) { 257 if (NULL == m_device) { 258 VkDeviceObj *temp_device = new VkDeviceObj(0, objs[0], m_device_extension_names); 259 *features = temp_device->phy().features(); 260 delete (temp_device); 261 } else { 262 *features = m_device->phy().features(); 263 } 264} 265 266void VkRenderFramework::InitState(VkPhysicalDeviceFeatures *features, const VkCommandPoolCreateFlags flags) { 267 // Remove any unsupported device extension names from list 268 for (auto ext = m_device_extension_names.begin(); ext != m_device_extension_names.end();) { 269 if (!DeviceExtensionSupported(objs[0], nullptr, *ext)) { 270 bool found = false; 271 for (auto layer = m_instance_layer_names.begin(); layer != m_instance_layer_names.end();) { 272 if (!DeviceExtensionSupported(objs[0], *layer, *ext)) { 273 found = true; 274 break; 275 } 276 } 277 if (!found) { 278 ADD_FAILURE() << "InitState(): The requested device extension " << *ext << " was not found. Disabled."; 279 ext = m_device_extension_names.erase(ext); 280 } else { 281 ++ext; 282 } 283 } else { 284 ++ext; 285 } 286 } 287 288 m_device = new VkDeviceObj(0, objs[0], m_device_extension_names, features); 289 m_device->get_device_queue(); 290 291 m_depthStencil = new VkDepthStencilObj(m_device); 292 293 m_render_target_fmt = VkTestFramework::GetFormat(inst, m_device); 294 295 m_lineWidth = 1.0f; 296 297 m_depthBiasConstantFactor = 0.0f; 298 m_depthBiasClamp = 0.0f; 299 m_depthBiasSlopeFactor = 0.0f; 300 301 m_blendConstants[0] = 1.0f; 302 m_blendConstants[1] = 1.0f; 303 m_blendConstants[2] = 1.0f; 304 m_blendConstants[3] = 1.0f; 305 306 m_minDepthBounds = 0.f; 307 m_maxDepthBounds = 1.f; 308 309 m_compareMask = 0xff; 310 m_writeMask = 0xff; 311 m_reference = 0; 312 313 m_commandPool = new VkCommandPoolObj(m_device, m_device->graphics_queue_node_index_, flags); 314 315 m_commandBuffer = new VkCommandBufferObj(m_device, m_commandPool); 316} 317 318void VkRenderFramework::InitViewport(float width, float height) { 319 VkViewport viewport; 320 VkRect2D scissor; 321 viewport.x = 0; 322 viewport.y = 0; 323 viewport.width = 1.f * width; 324 viewport.height = 1.f * height; 325 viewport.minDepth = 0.f; 326 viewport.maxDepth = 1.f; 327 m_viewports.push_back(viewport); 328 329 scissor.extent.width = (int32_t)width; 330 scissor.extent.height = (int32_t)height; 331 scissor.offset.x = 0; 332 scissor.offset.y = 0; 333 m_scissors.push_back(scissor); 334 335 m_width = width; 336 m_height = height; 337} 338 339void VkRenderFramework::InitViewport() { InitViewport(m_width, m_height); } 340void VkRenderFramework::InitRenderTarget() { InitRenderTarget(1); } 341 342void VkRenderFramework::InitRenderTarget(uint32_t targets) { InitRenderTarget(targets, NULL); } 343 344void VkRenderFramework::InitRenderTarget(VkImageView *dsBinding) { InitRenderTarget(1, dsBinding); } 345 346void VkRenderFramework::InitRenderTarget(uint32_t targets, VkImageView *dsBinding) { 347 std::vector<VkAttachmentDescription> attachments; 348 std::vector<VkAttachmentReference> color_references; 349 std::vector<VkImageView> bindings; 350 attachments.reserve(targets + 1); // +1 for dsBinding 351 color_references.reserve(targets); 352 bindings.reserve(targets + 1); // +1 for dsBinding 353 354 VkAttachmentDescription att = {}; 355 att.format = m_render_target_fmt; 356 att.samples = VK_SAMPLE_COUNT_1_BIT; 357 att.loadOp = (m_clear_via_load_op) ? 
VK_ATTACHMENT_LOAD_OP_CLEAR : VK_ATTACHMENT_LOAD_OP_LOAD; 358 att.storeOp = VK_ATTACHMENT_STORE_OP_STORE; 359 att.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE; 360 att.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE; 361 att.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; 362 att.finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; 363 364 VkAttachmentReference ref = {}; 365 ref.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; 366 367 m_renderPassClearValues.clear(); 368 VkClearValue clear = {}; 369 clear.color = m_clear_color; 370 371 for (uint32_t i = 0; i < targets; i++) { 372 attachments.push_back(att); 373 374 ref.attachment = i; 375 color_references.push_back(ref); 376 377 m_renderPassClearValues.push_back(clear); 378 379 VkImageObj *img = new VkImageObj(m_device); 380 381 VkFormatProperties props; 382 383 vkGetPhysicalDeviceFormatProperties(m_device->phy().handle(), m_render_target_fmt, &props); 384 385 if (props.linearTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) { 386 img->Init((uint32_t)m_width, (uint32_t)m_height, 1, m_render_target_fmt, 387 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, 388 VK_IMAGE_TILING_LINEAR); 389 } else if (props.optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) { 390 img->Init((uint32_t)m_width, (uint32_t)m_height, 1, m_render_target_fmt, 391 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, 392 VK_IMAGE_TILING_OPTIMAL); 393 } else { 394 FAIL() << "Neither Linear nor Optimal allowed for render target"; 395 } 396 397 m_renderTargets.push_back(img); 398 bindings.push_back(img->targetView(m_render_target_fmt)); 399 } 400 401 VkSubpassDescription subpass = {}; 402 subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS; 403 subpass.flags = 0; 404 subpass.inputAttachmentCount = 0; 405 subpass.pInputAttachments = NULL; 406 subpass.colorAttachmentCount = targets; 407 subpass.pColorAttachments = color_references.data(); 408 subpass.pResolveAttachments = NULL; 409 410 VkAttachmentReference ds_reference; 411 if (dsBinding) { 412 att.format = m_depth_stencil_fmt; 413 att.loadOp = (m_clear_via_load_op) ? 
VK_ATTACHMENT_LOAD_OP_CLEAR : VK_ATTACHMENT_LOAD_OP_LOAD; 414 ; 415 att.storeOp = VK_ATTACHMENT_STORE_OP_STORE; 416 att.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_LOAD; 417 att.stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE; 418 att.initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL; 419 att.finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL; 420 attachments.push_back(att); 421 422 clear.depthStencil.depth = m_depth_clear_color; 423 clear.depthStencil.stencil = m_stencil_clear_color; 424 m_renderPassClearValues.push_back(clear); 425 426 bindings.push_back(*dsBinding); 427 428 ds_reference.attachment = targets; 429 ds_reference.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL; 430 subpass.pDepthStencilAttachment = &ds_reference; 431 } else { 432 subpass.pDepthStencilAttachment = NULL; 433 } 434 435 subpass.preserveAttachmentCount = 0; 436 subpass.pPreserveAttachments = NULL; 437 438 VkRenderPassCreateInfo rp_info = {}; 439 rp_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO; 440 rp_info.attachmentCount = attachments.size(); 441 rp_info.pAttachments = attachments.data(); 442 rp_info.subpassCount = 1; 443 rp_info.pSubpasses = &subpass; 444 VkSubpassDependency subpass_dep = {}; 445 if (m_addRenderPassSelfDependency) { 446 // Add a subpass self-dependency to subpass 0 of default renderPass 447 subpass_dep.srcSubpass = 0; 448 subpass_dep.dstSubpass = 0; 449 // Just using all framebuffer-space pipeline stages in order to get a reasonably large 450 // set of bits that can be used for both src & dst 451 subpass_dep.srcStageMask = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | 452 VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; 453 subpass_dep.dstStageMask = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | 454 VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; 455 // Add all of the gfx mem access bits that correlate to the fb-space pipeline stages 456 subpass_dep.srcAccessMask = VK_ACCESS_UNIFORM_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_SHADER_READ_BIT | 457 VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | 458 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | 459 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT; 460 subpass_dep.dstAccessMask = VK_ACCESS_UNIFORM_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_SHADER_READ_BIT | 461 VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | 462 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | 463 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT; 464 // Must include dep_by_region bit when src & dst both include framebuffer-space stages 465 subpass_dep.dependencyFlags = VK_DEPENDENCY_BY_REGION_BIT; 466 rp_info.dependencyCount = 1; 467 rp_info.pDependencies = &subpass_dep; 468 } 469 470 vkCreateRenderPass(device(), &rp_info, NULL, &m_renderPass); 471 renderPass_info_ = rp_info; // Save away a copy for tests that need access to the render pass state 472 // Create Framebuffer and RenderPass with color attachments and any 473 // depth/stencil attachment 474 VkFramebufferCreateInfo fb_info = {}; 475 fb_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO; 476 fb_info.pNext = NULL; 477 fb_info.renderPass = m_renderPass; 478 fb_info.attachmentCount = bindings.size(); 479 fb_info.pAttachments = bindings.data(); 480 fb_info.width = (uint32_t)m_width; 481 
fb_info.height = (uint32_t)m_height; 482 fb_info.layers = 1; 483 484 vkCreateFramebuffer(device(), &fb_info, NULL, &m_framebuffer); 485 486 m_renderPassBeginInfo.renderPass = m_renderPass; 487 m_renderPassBeginInfo.framebuffer = m_framebuffer; 488 m_renderPassBeginInfo.renderArea.extent.width = (int32_t)m_width; 489 m_renderPassBeginInfo.renderArea.extent.height = (int32_t)m_height; 490 m_renderPassBeginInfo.clearValueCount = m_renderPassClearValues.size(); 491 m_renderPassBeginInfo.pClearValues = m_renderPassClearValues.data(); 492} 493 494VkDeviceObj::VkDeviceObj(uint32_t id, VkPhysicalDevice obj) : vk_testing::Device(obj), id(id) { 495 init(); 496 497 props = phy().properties(); 498 queue_props = phy().queue_properties(); 499} 500 501VkDeviceObj::VkDeviceObj(uint32_t id, VkPhysicalDevice obj, std::vector<const char *> &extension_names, 502 VkPhysicalDeviceFeatures *features) 503 : vk_testing::Device(obj), id(id) { 504 init(extension_names, features); 505 506 props = phy().properties(); 507 queue_props = phy().queue_properties(); 508} 509 510uint32_t VkDeviceObj::QueueFamilyMatching(VkQueueFlags with, VkQueueFlags without, bool all_bits) { 511 // Find a queue family with and without desired capabilities 512 for (uint32_t i = 0; i < queue_props.size(); i++) { 513 auto flags = queue_props[i].queueFlags; 514 bool matches = all_bits ? (flags & with) == with : (flags & with) != 0; 515 if (matches && ((flags & without) == 0) && (queue_props[i].queueCount > 0)) { 516 return i; 517 } 518 } 519 return UINT32_MAX; 520} 521 522void VkDeviceObj::get_device_queue() { 523 ASSERT_NE(true, graphics_queues().empty()); 524 m_queue = graphics_queues()[0]->handle(); 525} 526 527VkDescriptorSetLayoutObj::VkDescriptorSetLayoutObj(const VkDeviceObj *device, 528 const std::vector<VkDescriptorSetLayoutBinding> &descriptor_set_bindings, 529 VkDescriptorSetLayoutCreateFlags flags) { 530 VkDescriptorSetLayoutCreateInfo dsl_ci = {}; 531 dsl_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO; 532 dsl_ci.flags = flags; 533 dsl_ci.bindingCount = static_cast<uint32_t>(descriptor_set_bindings.size()); 534 dsl_ci.pBindings = descriptor_set_bindings.data(); 535 536 init(*device, dsl_ci); 537} 538 539VkDescriptorSetObj::VkDescriptorSetObj(VkDeviceObj *device) : m_device(device), m_nextSlot(0) {} 540 541VkDescriptorSetObj::~VkDescriptorSetObj() { 542 if (m_set) { 543 delete m_set; 544 } 545} 546 547int VkDescriptorSetObj::AppendDummy() { 548 /* request a descriptor but do not update it */ 549 VkDescriptorSetLayoutBinding binding = {}; 550 binding.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER; 551 binding.descriptorCount = 1; 552 binding.binding = m_layout_bindings.size(); 553 binding.stageFlags = VK_SHADER_STAGE_ALL; 554 binding.pImmutableSamplers = NULL; 555 556 m_layout_bindings.push_back(binding); 557 m_type_counts[VK_DESCRIPTOR_TYPE_STORAGE_BUFFER] += binding.descriptorCount; 558 559 return m_nextSlot++; 560} 561 562int VkDescriptorSetObj::AppendBuffer(VkDescriptorType type, VkConstantBufferObj &constantBuffer) { 563 assert(type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER || type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC || 564 type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER || type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC); 565 VkDescriptorSetLayoutBinding binding = {}; 566 binding.descriptorType = type; 567 binding.descriptorCount = 1; 568 binding.binding = m_layout_bindings.size(); 569 binding.stageFlags = VK_SHADER_STAGE_ALL; 570 binding.pImmutableSamplers = NULL; 571 572 
m_layout_bindings.push_back(binding); 573 m_type_counts[type] += binding.descriptorCount; 574 575 m_writes.push_back(vk_testing::Device::write_descriptor_set(vk_testing::DescriptorSet(), m_nextSlot, 0, type, 1, 576 &constantBuffer.m_descriptorBufferInfo)); 577 578 return m_nextSlot++; 579} 580 581int VkDescriptorSetObj::AppendSamplerTexture(VkSamplerObj *sampler, VkTextureObj *texture) { 582 VkDescriptorSetLayoutBinding binding = {}; 583 binding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER; 584 binding.descriptorCount = 1; 585 binding.binding = m_layout_bindings.size(); 586 binding.stageFlags = VK_SHADER_STAGE_ALL; 587 binding.pImmutableSamplers = NULL; 588 589 m_layout_bindings.push_back(binding); 590 m_type_counts[VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER] += binding.descriptorCount; 591 VkDescriptorImageInfo tmp = texture->m_imageInfo; 592 tmp.sampler = sampler->handle(); 593 m_imageSamplerDescriptors.push_back(tmp); 594 595 m_writes.push_back(vk_testing::Device::write_descriptor_set(vk_testing::DescriptorSet(), m_nextSlot, 0, 596 VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, &tmp)); 597 598 return m_nextSlot++; 599} 600 601VkPipelineLayout VkDescriptorSetObj::GetPipelineLayout() const { return m_pipeline_layout.handle(); } 602 603VkDescriptorSet VkDescriptorSetObj::GetDescriptorSetHandle() const { 604 if (m_set) 605 return m_set->handle(); 606 else 607 return VK_NULL_HANDLE; 608} 609 610void VkDescriptorSetObj::CreateVKDescriptorSet(VkCommandBufferObj *commandBuffer) { 611 if (m_type_counts.size()) { 612 // create VkDescriptorPool 613 VkDescriptorPoolSize poolSize; 614 vector<VkDescriptorPoolSize> sizes; 615 for (auto it = m_type_counts.begin(); it != m_type_counts.end(); ++it) { 616 poolSize.descriptorCount = it->second; 617 poolSize.type = it->first; 618 sizes.push_back(poolSize); 619 } 620 VkDescriptorPoolCreateInfo pool = {}; 621 pool.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO; 622 pool.poolSizeCount = sizes.size(); 623 pool.maxSets = 1; 624 pool.pPoolSizes = sizes.data(); 625 init(*m_device, pool); 626 } 627 628 // create VkDescriptorSetLayout 629 VkDescriptorSetLayoutCreateInfo layout = {}; 630 layout.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO; 631 layout.bindingCount = m_layout_bindings.size(); 632 layout.pBindings = m_layout_bindings.data(); 633 634 m_layout.init(*m_device, layout); 635 vector<const vk_testing::DescriptorSetLayout *> layouts; 636 layouts.push_back(&m_layout); 637 638 // create VkPipelineLayout 639 VkPipelineLayoutCreateInfo pipeline_layout = {}; 640 pipeline_layout.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO; 641 pipeline_layout.setLayoutCount = layouts.size(); 642 pipeline_layout.pSetLayouts = NULL; 643 644 m_pipeline_layout.init(*m_device, pipeline_layout, layouts); 645 646 if (m_type_counts.size()) { 647 // create VkDescriptorSet 648 m_set = alloc_sets(*m_device, m_layout); 649 650 // build the update array 651 size_t imageSamplerCount = 0; 652 for (std::vector<VkWriteDescriptorSet>::iterator it = m_writes.begin(); it != m_writes.end(); it++) { 653 it->dstSet = m_set->handle(); 654 if (it->descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) 655 it->pImageInfo = &m_imageSamplerDescriptors[imageSamplerCount++]; 656 } 657 658 // do the updates 659 m_device->update_descriptor_sets(m_writes); 660 } 661} 662 663VkRenderpassObj::VkRenderpassObj(VkDeviceObj *dev) { 664 // Create a renderPass with a single color attachment 665 VkAttachmentReference attach = {}; 666 attach.layout = VK_IMAGE_LAYOUT_GENERAL; 
667 668 VkSubpassDescription subpass = {}; 669 subpass.pColorAttachments = &attach; 670 subpass.colorAttachmentCount = 1; 671 672 VkRenderPassCreateInfo rpci = {}; 673 rpci.subpassCount = 1; 674 rpci.pSubpasses = &subpass; 675 rpci.attachmentCount = 1; 676 677 VkAttachmentDescription attach_desc = {}; 678 attach_desc.format = VK_FORMAT_B8G8R8A8_UNORM; 679 attach_desc.samples = VK_SAMPLE_COUNT_1_BIT; 680 attach_desc.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; 681 attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL; 682 683 rpci.pAttachments = &attach_desc; 684 rpci.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO; 685 686 device = dev->device(); 687 vkCreateRenderPass(device, &rpci, NULL, &m_renderpass); 688} 689 690VkRenderpassObj::~VkRenderpassObj() { vkDestroyRenderPass(device, m_renderpass, NULL); } 691 692VkImageObj::VkImageObj(VkDeviceObj *dev) { 693 m_device = dev; 694 m_descriptorImageInfo.imageView = VK_NULL_HANDLE; 695 m_descriptorImageInfo.imageLayout = VK_IMAGE_LAYOUT_GENERAL; 696} 697 698void VkImageObj::ImageMemoryBarrier(VkCommandBufferObj *cmd_buf, VkImageAspectFlags aspect, VkFlags output_mask /*= 699 VK_ACCESS_HOST_WRITE_BIT | 700 VK_ACCESS_SHADER_WRITE_BIT | 701 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | 702 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | 703 VK_MEMORY_OUTPUT_COPY_BIT*/, 704 VkFlags input_mask /*= 705 VK_ACCESS_HOST_READ_BIT | 706 VK_ACCESS_INDIRECT_COMMAND_READ_BIT | 707 VK_ACCESS_INDEX_READ_BIT | 708 VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT | 709 VK_ACCESS_UNIFORM_READ_BIT | 710 VK_ACCESS_SHADER_READ_BIT | 711 VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | 712 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | 713 VK_MEMORY_INPUT_COPY_BIT*/, 714 VkImageLayout image_layout) { 715 // TODO: Mali device crashing with VK_REMAINING_MIP_LEVELS 716 const VkImageSubresourceRange subresourceRange = 717 subresource_range(aspect, 0, /*VK_REMAINING_MIP_LEVELS*/ 1, 0, 1 /*VK_REMAINING_ARRAY_LAYERS*/); 718 VkImageMemoryBarrier barrier; 719 barrier = image_memory_barrier(output_mask, input_mask, Layout(), image_layout, subresourceRange); 720 721 VkImageMemoryBarrier *pmemory_barrier = &barrier; 722 723 VkPipelineStageFlags src_stages = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT; 724 VkPipelineStageFlags dest_stages = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT; 725 726 // write barrier to the command buffer 727 vkCmdPipelineBarrier(cmd_buf->handle(), src_stages, dest_stages, 0, 0, NULL, 0, NULL, 1, pmemory_barrier); 728} 729 730void VkImageObj::SetLayout(VkCommandBufferObj *cmd_buf, VkImageAspectFlags aspect, VkImageLayout image_layout) { 731 VkFlags src_mask, dst_mask; 732 const VkFlags all_cache_outputs = VK_ACCESS_HOST_WRITE_BIT | VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | 733 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | VK_ACCESS_TRANSFER_WRITE_BIT; 734 const VkFlags all_cache_inputs = VK_ACCESS_HOST_READ_BIT | VK_ACCESS_INDIRECT_COMMAND_READ_BIT | VK_ACCESS_INDEX_READ_BIT | 735 VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT | VK_ACCESS_UNIFORM_READ_BIT | VK_ACCESS_SHADER_READ_BIT | 736 VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | 737 VK_ACCESS_MEMORY_READ_BIT; 738 739 if (image_layout == m_descriptorImageInfo.imageLayout) { 740 return; 741 } 742 743 switch (image_layout) { 744 case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL: 745 if (m_descriptorImageInfo.imageLayout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL) 746 src_mask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; 747 else 748 src_mask = VK_ACCESS_TRANSFER_WRITE_BIT; 749 dst_mask = VK_ACCESS_SHADER_READ_BIT | 
VK_ACCESS_TRANSFER_READ_BIT; 750 break; 751 752 case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL: 753 if (m_descriptorImageInfo.imageLayout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL) 754 src_mask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; 755 else if (m_descriptorImageInfo.imageLayout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL) 756 src_mask = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT; 757 else 758 src_mask = VK_ACCESS_TRANSFER_WRITE_BIT; 759 dst_mask = VK_ACCESS_TRANSFER_WRITE_BIT; 760 break; 761 762 case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL: 763 if (m_descriptorImageInfo.imageLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) 764 src_mask = VK_ACCESS_TRANSFER_WRITE_BIT; 765 else 766 src_mask = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT; 767 dst_mask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_MEMORY_READ_BIT; 768 break; 769 770 case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL: 771 if (m_descriptorImageInfo.imageLayout == VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL) 772 src_mask = VK_ACCESS_TRANSFER_READ_BIT; 773 else 774 src_mask = 0; 775 dst_mask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; 776 break; 777 778 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL: 779 dst_mask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT; 780 src_mask = all_cache_outputs; 781 break; 782 783 default: 784 src_mask = all_cache_outputs; 785 dst_mask = all_cache_inputs; 786 break; 787 } 788 789 if (m_descriptorImageInfo.imageLayout == VK_IMAGE_LAYOUT_UNDEFINED) src_mask = 0; 790 791 ImageMemoryBarrier(cmd_buf, aspect, src_mask, dst_mask, image_layout); 792 m_descriptorImageInfo.imageLayout = image_layout; 793} 794 795void VkImageObj::SetLayout(VkImageAspectFlags aspect, VkImageLayout image_layout) { 796 if (image_layout == m_descriptorImageInfo.imageLayout) { 797 return; 798 } 799 800 VkCommandPoolObj pool(m_device, m_device->graphics_queue_node_index_); 801 VkCommandBufferObj cmd_buf(m_device, &pool); 802 803 /* Build command buffer to set image layout in the driver */ 804 cmd_buf.begin(); 805 SetLayout(&cmd_buf, aspect, image_layout); 806 cmd_buf.end(); 807 808 cmd_buf.QueueCommandBuffer(); 809} 810 811bool VkImageObj::IsCompatible(const VkImageUsageFlags usages, const VkFormatFeatureFlags features) { 812 VkFormatFeatureFlags all_feature_flags = 813 VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT | VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT | VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT | 814 VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT | VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT | 815 VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT | VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT | 816 VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT | VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT | 817 VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_BLIT_DST_BIT | 818 VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT; 819 if (m_device->IsEnbledExtension(VK_IMG_FILTER_CUBIC_EXTENSION_NAME)) { 820 all_feature_flags |= VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG; 821 } 822 823 if (m_device->IsEnbledExtension(VK_KHR_MAINTENANCE1_EXTENSION_NAME)) { 824 all_feature_flags |= VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR | VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR; 825 } 826 827 if (m_device->IsEnbledExtension(VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME)) { 828 all_feature_flags |= VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT; 829 } 830 831 if (m_device->IsEnbledExtension(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME)) { 832 all_feature_flags |= VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR | 833 
VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR | 834 VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR | 835 VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR | 836 VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR | 837 VK_FORMAT_FEATURE_DISJOINT_BIT_KHR | VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR; 838 } 839 840 if ((features & all_feature_flags) == 0) return false; // whole format unsupported 841 842 if ((usages & VK_IMAGE_USAGE_SAMPLED_BIT) && !(features & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)) return false; 843 if ((usages & VK_IMAGE_USAGE_STORAGE_BIT) && !(features & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT)) return false; 844 if ((usages & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) && !(features & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT)) return false; 845 if ((usages & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) && !(features & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)) 846 return false; 847 848 if (m_device->IsEnbledExtension(VK_KHR_MAINTENANCE1_EXTENSION_NAME)) { 849 // WORKAROUND: for DevSim not reporting extended enums, and possibly some drivers too 850 const auto all_nontransfer_feature_flags = 851 all_feature_flags ^ (VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR | VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR); 852 const bool transfer_probably_supported_anyway = (features & all_nontransfer_feature_flags) > 0; 853 if (!transfer_probably_supported_anyway) { 854 if ((usages & VK_IMAGE_USAGE_TRANSFER_SRC_BIT) && !(features & VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR)) return false; 855 if ((usages & VK_IMAGE_USAGE_TRANSFER_DST_BIT) && !(features & VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR)) return false; 856 } 857 } 858 859 return true; 860} 861 862void VkImageObj::InitNoLayout(uint32_t const width, uint32_t const height, uint32_t const mipLevels, VkFormat const format, 863 VkFlags const usage, VkImageTiling const requested_tiling, VkMemoryPropertyFlags const reqs, 864 const std::vector<uint32_t> *queue_families) { 865 VkFormatProperties image_fmt; 866 VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL; 867 868 vkGetPhysicalDeviceFormatProperties(m_device->phy().handle(), format, &image_fmt); 869 870 if (requested_tiling == VK_IMAGE_TILING_LINEAR) { 871 if (IsCompatible(usage, image_fmt.linearTilingFeatures)) { 872 tiling = VK_IMAGE_TILING_LINEAR; 873 } else if (IsCompatible(usage, image_fmt.optimalTilingFeatures)) { 874 tiling = VK_IMAGE_TILING_OPTIMAL; 875 } else { 876 FAIL() << "VkImageObj::init() error: unsupported tiling configuration. Usage: " << std::hex << std::showbase << usage 877 << ", supported linear features: " << image_fmt.linearTilingFeatures; 878 } 879 } else if (IsCompatible(usage, image_fmt.optimalTilingFeatures)) { 880 tiling = VK_IMAGE_TILING_OPTIMAL; 881 } else if (IsCompatible(usage, image_fmt.linearTilingFeatures)) { 882 tiling = VK_IMAGE_TILING_LINEAR; 883 } else { 884 FAIL() << "VkImageObj::init() error: unsupported tiling configuration. 
Usage: " << std::hex << std::showbase << usage 885 << ", supported optimal features: " << image_fmt.optimalTilingFeatures; 886 } 887 888 VkImageCreateInfo imageCreateInfo = vk_testing::Image::create_info(); 889 imageCreateInfo.imageType = VK_IMAGE_TYPE_2D; 890 imageCreateInfo.format = format; 891 imageCreateInfo.extent.width = width; 892 imageCreateInfo.extent.height = height; 893 imageCreateInfo.mipLevels = mipLevels; 894 imageCreateInfo.tiling = tiling; 895 imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; 896 897 // Automatically set sharing mode etc. based on queue family information 898 if (queue_families && (queue_families->size() > 1)) { 899 imageCreateInfo.sharingMode = VK_SHARING_MODE_CONCURRENT; 900 imageCreateInfo.queueFamilyIndexCount = static_cast<uint32_t>(queue_families->size()); 901 imageCreateInfo.pQueueFamilyIndices = queue_families->data(); 902 } 903 904 Layout(imageCreateInfo.initialLayout); 905 imageCreateInfo.usage = usage; 906 907 vk_testing::Image::init(*m_device, imageCreateInfo, reqs); 908} 909 910void VkImageObj::Init(uint32_t const width, uint32_t const height, uint32_t const mipLevels, VkFormat const format, 911 VkFlags const usage, VkImageTiling const requested_tiling, VkMemoryPropertyFlags const reqs, 912 const std::vector<uint32_t> *queue_families) { 913 InitNoLayout(width, height, mipLevels, format, usage, requested_tiling, reqs, queue_families); 914 915 VkImageLayout newLayout; 916 if (usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) 917 newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; 918 else if (usage & VK_IMAGE_USAGE_SAMPLED_BIT) 919 newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL; 920 else 921 newLayout = m_descriptorImageInfo.imageLayout; 922 923 VkImageAspectFlags image_aspect = 0; 924 if (FormatIsDepthAndStencil(format)) { 925 image_aspect = VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_DEPTH_BIT; 926 } else if (FormatIsDepthOnly(format)) { 927 image_aspect = VK_IMAGE_ASPECT_DEPTH_BIT; 928 } else if (FormatIsStencilOnly(format)) { 929 image_aspect = VK_IMAGE_ASPECT_STENCIL_BIT; 930 } else { // color 931 image_aspect = VK_IMAGE_ASPECT_COLOR_BIT; 932 } 933 SetLayout(image_aspect, newLayout); 934} 935 936void VkImageObj::init(const VkImageCreateInfo *create_info) { 937 VkFormatProperties image_fmt; 938 vkGetPhysicalDeviceFormatProperties(m_device->phy().handle(), create_info->format, &image_fmt); 939 940 switch (create_info->tiling) { 941 case VK_IMAGE_TILING_OPTIMAL: 942 if (!IsCompatible(create_info->usage, image_fmt.optimalTilingFeatures)) { 943 FAIL() << "VkImageObj::init() error: unsupported tiling configuration. Usage: " << std::hex << std::showbase 944 << create_info->usage << ", supported optimal features: " << image_fmt.optimalTilingFeatures; 945 } 946 break; 947 case VK_IMAGE_TILING_LINEAR: 948 if (!IsCompatible(create_info->usage, image_fmt.linearTilingFeatures)) { 949 FAIL() << "VkImageObj::init() error: unsupported tiling configuration. 
Usage: " << std::hex << std::showbase 950 << create_info->usage << ", supported linear features: " << image_fmt.linearTilingFeatures; 951 } 952 break; 953 default: 954 break; 955 } 956 Layout(create_info->initialLayout); 957 958 vk_testing::Image::init(*m_device, *create_info, 0); 959 960 VkImageAspectFlags image_aspect = 0; 961 if (FormatIsDepthAndStencil(create_info->format)) { 962 image_aspect = VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_DEPTH_BIT; 963 } else if (FormatIsDepthOnly(create_info->format)) { 964 image_aspect = VK_IMAGE_ASPECT_DEPTH_BIT; 965 } else if (FormatIsStencilOnly(create_info->format)) { 966 image_aspect = VK_IMAGE_ASPECT_STENCIL_BIT; 967 } else { // color 968 image_aspect = VK_IMAGE_ASPECT_COLOR_BIT; 969 } 970 SetLayout(image_aspect, VK_IMAGE_LAYOUT_GENERAL); 971} 972 973VkResult VkImageObj::CopyImage(VkImageObj &src_image) { 974 VkImageLayout src_image_layout, dest_image_layout; 975 976 VkCommandPoolObj pool(m_device, m_device->graphics_queue_node_index_); 977 VkCommandBufferObj cmd_buf(m_device, &pool); 978 979 /* Build command buffer to copy staging texture to usable texture */ 980 cmd_buf.begin(); 981 982 /* TODO: Can we determine image aspect from image object? */ 983 src_image_layout = src_image.Layout(); 984 src_image.SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL); 985 986 dest_image_layout = (this->Layout() == VK_IMAGE_LAYOUT_UNDEFINED) ? VK_IMAGE_LAYOUT_GENERAL : this->Layout(); 987 this->SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL); 988 989 VkImageCopy copy_region = {}; 990 copy_region.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; 991 copy_region.srcSubresource.baseArrayLayer = 0; 992 copy_region.srcSubresource.mipLevel = 0; 993 copy_region.srcSubresource.layerCount = 1; 994 copy_region.srcOffset.x = 0; 995 copy_region.srcOffset.y = 0; 996 copy_region.srcOffset.z = 0; 997 copy_region.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; 998 copy_region.dstSubresource.baseArrayLayer = 0; 999 copy_region.dstSubresource.mipLevel = 0; 1000 copy_region.dstSubresource.layerCount = 1; 1001 copy_region.dstOffset.x = 0; 1002 copy_region.dstOffset.y = 0; 1003 copy_region.dstOffset.z = 0; 1004 copy_region.extent = src_image.extent(); 1005 1006 vkCmdCopyImage(cmd_buf.handle(), src_image.handle(), src_image.Layout(), handle(), Layout(), 1, ©_region); 1007 1008 src_image.SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, src_image_layout); 1009 1010 this->SetLayout(&cmd_buf, VK_IMAGE_ASPECT_COLOR_BIT, dest_image_layout); 1011 1012 cmd_buf.end(); 1013 1014 cmd_buf.QueueCommandBuffer(); 1015 1016 return VK_SUCCESS; 1017} 1018 1019VkTextureObj::VkTextureObj(VkDeviceObj *device, uint32_t *colors) : VkImageObj(device) { 1020 m_device = device; 1021 const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM; 1022 uint32_t tex_colors[2] = {0xffff0000, 0xff00ff00}; 1023 void *data; 1024 uint32_t x, y; 1025 VkImageObj stagingImage(device); 1026 VkMemoryPropertyFlags reqs = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT; 1027 1028 stagingImage.Init(16, 16, 1, tex_format, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT, 1029 VK_IMAGE_TILING_LINEAR, reqs); 1030 VkSubresourceLayout layout = stagingImage.subresource_layout(subresource(VK_IMAGE_ASPECT_COLOR_BIT, 0, 0)); 1031 1032 if (colors == NULL) colors = tex_colors; 1033 1034 memset(&m_imageInfo, 0, sizeof(m_imageInfo)); 1035 1036 VkImageViewCreateInfo view = {}; 1037 view.sType = 
VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO; 1038 view.pNext = NULL; 1039 view.image = VK_NULL_HANDLE; 1040 view.viewType = VK_IMAGE_VIEW_TYPE_2D; 1041 view.format = tex_format; 1042 view.components.r = VK_COMPONENT_SWIZZLE_R; 1043 view.components.g = VK_COMPONENT_SWIZZLE_G; 1044 view.components.b = VK_COMPONENT_SWIZZLE_B; 1045 view.components.a = VK_COMPONENT_SWIZZLE_A; 1046 view.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; 1047 view.subresourceRange.baseMipLevel = 0; 1048 view.subresourceRange.levelCount = 1; 1049 view.subresourceRange.baseArrayLayer = 0; 1050 view.subresourceRange.layerCount = 1; 1051 1052 /* create image */ 1053 Init(16, 16, 1, tex_format, VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_OPTIMAL); 1054 stagingImage.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL); 1055 1056 /* create image view */ 1057 view.image = handle(); 1058 m_textureView.init(*m_device, view); 1059 m_imageInfo.imageView = m_textureView.handle(); 1060 1061 data = stagingImage.MapMemory(); 1062 1063 for (y = 0; y < extent().height; y++) { 1064 uint32_t *row = (uint32_t *)((char *)data + layout.rowPitch * y); 1065 for (x = 0; x < extent().width; x++) row[x] = colors[(x & 1) ^ (y & 1)]; 1066 } 1067 stagingImage.UnmapMemory(); 1068 stagingImage.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL); 1069 VkImageObj::CopyImage(stagingImage); 1070} 1071 1072VkSamplerObj::VkSamplerObj(VkDeviceObj *device) { 1073 m_device = device; 1074 1075 VkSamplerCreateInfo samplerCreateInfo; 1076 memset(&samplerCreateInfo, 0, sizeof(samplerCreateInfo)); 1077 samplerCreateInfo.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO; 1078 samplerCreateInfo.magFilter = VK_FILTER_NEAREST; 1079 samplerCreateInfo.minFilter = VK_FILTER_NEAREST; 1080 samplerCreateInfo.mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST; 1081 samplerCreateInfo.addressModeU = VK_SAMPLER_ADDRESS_MODE_REPEAT; 1082 samplerCreateInfo.addressModeV = VK_SAMPLER_ADDRESS_MODE_REPEAT; 1083 samplerCreateInfo.addressModeW = VK_SAMPLER_ADDRESS_MODE_REPEAT; 1084 samplerCreateInfo.mipLodBias = 0.0; 1085 samplerCreateInfo.anisotropyEnable = VK_FALSE; 1086 samplerCreateInfo.maxAnisotropy = 1; 1087 samplerCreateInfo.compareOp = VK_COMPARE_OP_NEVER; 1088 samplerCreateInfo.minLod = 0.0; 1089 samplerCreateInfo.maxLod = 0.0; 1090 samplerCreateInfo.borderColor = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE; 1091 samplerCreateInfo.unnormalizedCoordinates = VK_FALSE; 1092 1093 init(*m_device, samplerCreateInfo); 1094} 1095 1096/* 1097 * Basic ConstantBuffer constructor. Then use create methods to fill in the 1098 * details. 
1099 */ 1100VkConstantBufferObj::VkConstantBufferObj(VkDeviceObj *device, VkBufferUsageFlags usage) { 1101 m_device = device; 1102 1103 memset(&m_descriptorBufferInfo, 0, sizeof(m_descriptorBufferInfo)); 1104 1105 // Special case for usages outside of original limits of framework 1106 if ((VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT) != usage) { 1107 init_no_mem(*m_device, create_info(0, usage)); 1108 } 1109} 1110 1111VkConstantBufferObj::VkConstantBufferObj(VkDeviceObj *device, VkDeviceSize allocationSize, const void *data, 1112 VkBufferUsageFlags usage) { 1113 m_device = device; 1114 1115 memset(&m_descriptorBufferInfo, 0, sizeof(m_descriptorBufferInfo)); 1116 1117 VkMemoryPropertyFlags reqs = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT; 1118 1119 if ((VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT) == usage) { 1120 init_as_src_and_dst(*m_device, allocationSize, reqs); 1121 } else { 1122 init(*m_device, create_info(allocationSize, usage), reqs); 1123 } 1124 1125 void *pData = memory().map(); 1126 memcpy(pData, data, static_cast<size_t>(allocationSize)); 1127 memory().unmap(); 1128 1129 /* 1130 * Constant buffers are going to be used as vertex input buffers 1131 * or as shader uniform buffers. So, we'll create the shaderbuffer 1132 * descriptor here so it's ready if needed. 1133 */ 1134 this->m_descriptorBufferInfo.buffer = handle(); 1135 this->m_descriptorBufferInfo.offset = 0; 1136 this->m_descriptorBufferInfo.range = allocationSize; 1137} 1138 1139VkPipelineShaderStageCreateInfo const &VkShaderObj::GetStageCreateInfo() const { return m_stage_info; } 1140 1141VkShaderObj::VkShaderObj(VkDeviceObj *device, const char *shader_code, VkShaderStageFlagBits stage, VkRenderFramework *framework, 1142 char const *name) { 1143 VkResult U_ASSERT_ONLY err = VK_SUCCESS; 1144 std::vector<unsigned int> spv; 1145 VkShaderModuleCreateInfo moduleCreateInfo; 1146 1147 m_device = device; 1148 m_stage_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; 1149 m_stage_info.pNext = nullptr; 1150 m_stage_info.flags = 0; 1151 m_stage_info.stage = stage; 1152 m_stage_info.module = VK_NULL_HANDLE; 1153 m_stage_info.pName = name; 1154 m_stage_info.pSpecializationInfo = nullptr; 1155 1156 moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO; 1157 moduleCreateInfo.pNext = nullptr; 1158 1159 framework->GLSLtoSPV(stage, shader_code, spv); 1160 moduleCreateInfo.pCode = spv.data(); 1161 moduleCreateInfo.codeSize = spv.size() * sizeof(unsigned int); 1162 moduleCreateInfo.flags = 0; 1163 1164 err = init_try(*m_device, moduleCreateInfo); 1165 m_stage_info.module = handle(); 1166 assert(VK_SUCCESS == err); 1167} 1168 1169VkPipelineLayoutObj::VkPipelineLayoutObj(VkDeviceObj *device, 1170 const std::vector<const VkDescriptorSetLayoutObj *> &descriptor_layouts, 1171 const std::vector<VkPushConstantRange> &push_constant_ranges) { 1172 VkPipelineLayoutCreateInfo pl_ci = {}; 1173 pl_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO; 1174 pl_ci.pushConstantRangeCount = static_cast<uint32_t>(push_constant_ranges.size()); 1175 pl_ci.pPushConstantRanges = push_constant_ranges.data(); 1176 1177 auto descriptor_layouts_unwrapped = MakeTestbindingHandles<const vk_testing::DescriptorSetLayout>(descriptor_layouts); 1178 1179 init(*device, pl_ci, descriptor_layouts_unwrapped); 1180} 1181 1182void VkPipelineLayoutObj::Reset() { *this = VkPipelineLayoutObj(); } 1183 1184VkPipelineObj::VkPipelineObj(VkDeviceObj *device) { 1185 
m_device = device; 1186 1187 m_vi_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO; 1188 m_vi_state.pNext = nullptr; 1189 m_vi_state.flags = 0; 1190 m_vi_state.vertexBindingDescriptionCount = 0; 1191 m_vi_state.pVertexBindingDescriptions = nullptr; 1192 m_vi_state.vertexAttributeDescriptionCount = 0; 1193 m_vi_state.pVertexAttributeDescriptions = nullptr; 1194 1195 m_ia_state.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO; 1196 m_ia_state.pNext = nullptr; 1197 m_ia_state.flags = 0; 1198 m_ia_state.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST; 1199 m_ia_state.primitiveRestartEnable = VK_FALSE; 1200 1201 m_te_state = nullptr; 1202 1203 m_vp_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO; 1204 m_vp_state.pNext = VK_NULL_HANDLE; 1205 m_vp_state.flags = 0; 1206 m_vp_state.viewportCount = 1; 1207 m_vp_state.scissorCount = 1; 1208 m_vp_state.pViewports = nullptr; 1209 m_vp_state.pScissors = nullptr; 1210 1211 m_rs_state.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO; 1212 m_rs_state.pNext = nullptr; 1213 m_rs_state.flags = 0; 1214 m_rs_state.depthClampEnable = VK_FALSE; 1215 m_rs_state.rasterizerDiscardEnable = VK_FALSE; 1216 m_rs_state.polygonMode = VK_POLYGON_MODE_FILL; 1217 m_rs_state.cullMode = VK_CULL_MODE_BACK_BIT; 1218 m_rs_state.frontFace = VK_FRONT_FACE_CLOCKWISE; 1219 m_rs_state.depthBiasEnable = VK_FALSE; 1220 m_rs_state.depthBiasConstantFactor = 0.0f; 1221 m_rs_state.depthBiasClamp = 0.0f; 1222 m_rs_state.depthBiasSlopeFactor = 0.0f; 1223 m_rs_state.lineWidth = 1.0f; 1224 1225 m_ms_state.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO; 1226 m_ms_state.pNext = nullptr; 1227 m_ms_state.flags = 0; 1228 m_ms_state.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT; 1229 m_ms_state.sampleShadingEnable = VK_FALSE; 1230 m_ms_state.minSampleShading = 0.0f; 1231 m_ms_state.pSampleMask = nullptr; 1232 m_ms_state.alphaToCoverageEnable = VK_FALSE; 1233 m_ms_state.alphaToOneEnable = VK_FALSE; 1234 1235 m_ds_state = nullptr; 1236 1237 memset(&m_cb_state, 0, sizeof(m_cb_state)); 1238 m_cb_state.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO; 1239 m_cb_state.blendConstants[0] = 1.0f; 1240 m_cb_state.blendConstants[1] = 1.0f; 1241 m_cb_state.blendConstants[2] = 1.0f; 1242 m_cb_state.blendConstants[3] = 1.0f; 1243 1244 memset(&m_pd_state, 0, sizeof(m_pd_state)); 1245} 1246 1247void VkPipelineObj::AddShader(VkShaderObj *shader) { m_shaderStages.push_back(shader->GetStageCreateInfo()); } 1248 1249void VkPipelineObj::AddShader(VkPipelineShaderStageCreateInfo const &createInfo) { m_shaderStages.push_back(createInfo); } 1250 1251void VkPipelineObj::AddVertexInputAttribs(VkVertexInputAttributeDescription *vi_attrib, uint32_t count) { 1252 m_vi_state.pVertexAttributeDescriptions = vi_attrib; 1253 m_vi_state.vertexAttributeDescriptionCount = count; 1254} 1255 1256void VkPipelineObj::AddVertexInputBindings(VkVertexInputBindingDescription *vi_binding, uint32_t count) { 1257 m_vi_state.pVertexBindingDescriptions = vi_binding; 1258 m_vi_state.vertexBindingDescriptionCount = count; 1259} 1260 1261void VkPipelineObj::AddColorAttachment(uint32_t binding, const VkPipelineColorBlendAttachmentState &att) { 1262 if (binding + 1 > m_colorAttachments.size()) { 1263 m_colorAttachments.resize(binding + 1); 1264 } 1265 m_colorAttachments[binding] = att; 1266} 1267 1268void VkPipelineObj::SetDepthStencil(const VkPipelineDepthStencilStateCreateInfo *ds_state) { m_ds_state = ds_state; } 1269 1270void 
VkPipelineObj::SetViewport(const vector<VkViewport> viewports) { 1271 m_viewports = viewports; 1272 // If we explicitly set a null viewport, pass it through to create info 1273 // but preserve viewportCount because it musn't change 1274 if (m_viewports.size() == 0) { 1275 m_vp_state.pViewports = nullptr; 1276 } 1277} 1278 1279void VkPipelineObj::SetScissor(const vector<VkRect2D> scissors) { 1280 m_scissors = scissors; 1281 // If we explicitly set a null scissors, pass it through to create info 1282 // but preserve viewportCount because it musn't change 1283 if (m_scissors.size() == 0) { 1284 m_vp_state.pScissors = nullptr; 1285 } 1286} 1287 1288void VkPipelineObj::MakeDynamic(VkDynamicState state) { 1289 /* Only add a state once */ 1290 for (auto it = m_dynamic_state_enables.begin(); it != m_dynamic_state_enables.end(); it++) { 1291 if ((*it) == state) return; 1292 } 1293 m_dynamic_state_enables.push_back(state); 1294} 1295 1296void VkPipelineObj::SetMSAA(const VkPipelineMultisampleStateCreateInfo *ms_state) { m_ms_state = *ms_state; } 1297 1298void VkPipelineObj::SetInputAssembly(const VkPipelineInputAssemblyStateCreateInfo *ia_state) { m_ia_state = *ia_state; } 1299 1300void VkPipelineObj::SetRasterization(const VkPipelineRasterizationStateCreateInfo *rs_state) { m_rs_state = *rs_state; } 1301 1302void VkPipelineObj::SetTessellation(const VkPipelineTessellationStateCreateInfo *te_state) { m_te_state = te_state; } 1303 1304void VkPipelineObj::InitGraphicsPipelineCreateInfo(VkGraphicsPipelineCreateInfo *gp_ci) { 1305 gp_ci->stageCount = m_shaderStages.size(); 1306 gp_ci->pStages = m_shaderStages.size() ? m_shaderStages.data() : nullptr; 1307 1308 m_vi_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO; 1309 gp_ci->pVertexInputState = &m_vi_state; 1310 1311 m_ia_state.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO; 1312 gp_ci->pInputAssemblyState = &m_ia_state; 1313 1314 gp_ci->sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO; 1315 gp_ci->pNext = NULL; 1316 gp_ci->flags = 0; 1317 1318 m_cb_state.attachmentCount = m_colorAttachments.size(); 1319 m_cb_state.pAttachments = m_colorAttachments.data(); 1320 1321 if (m_viewports.size() > 0) { 1322 m_vp_state.viewportCount = m_viewports.size(); 1323 m_vp_state.pViewports = m_viewports.data(); 1324 } else { 1325 MakeDynamic(VK_DYNAMIC_STATE_VIEWPORT); 1326 } 1327 1328 if (m_scissors.size() > 0) { 1329 m_vp_state.scissorCount = m_scissors.size(); 1330 m_vp_state.pScissors = m_scissors.data(); 1331 } else { 1332 MakeDynamic(VK_DYNAMIC_STATE_SCISSOR); 1333 } 1334 1335 memset(&m_pd_state, 0, sizeof(m_pd_state)); 1336 if (m_dynamic_state_enables.size() > 0) { 1337 m_pd_state.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO; 1338 m_pd_state.dynamicStateCount = m_dynamic_state_enables.size(); 1339 m_pd_state.pDynamicStates = m_dynamic_state_enables.data(); 1340 gp_ci->pDynamicState = &m_pd_state; 1341 } 1342 1343 gp_ci->subpass = 0; 1344 gp_ci->pViewportState = &m_vp_state; 1345 gp_ci->pRasterizationState = &m_rs_state; 1346 gp_ci->pMultisampleState = &m_ms_state; 1347 gp_ci->pDepthStencilState = m_ds_state; 1348 gp_ci->pColorBlendState = &m_cb_state; 1349 gp_ci->pTessellationState = m_te_state; 1350} 1351 1352VkResult VkPipelineObj::CreateVKPipeline(VkPipelineLayout layout, VkRenderPass render_pass, VkGraphicsPipelineCreateInfo *gp_ci) { 1353 VkGraphicsPipelineCreateInfo info = {}; 1354 1355 // if not given a CreateInfo, create and initialize a local one. 
1356 if (gp_ci == nullptr) { 1357 gp_ci = &info; 1358 InitGraphicsPipelineCreateInfo(gp_ci); 1359 } 1360 1361 gp_ci->layout = layout; 1362 gp_ci->renderPass = render_pass; 1363 1364 return init_try(*m_device, *gp_ci); 1365} 1366 1367VkCommandBufferObj::VkCommandBufferObj(VkDeviceObj *device, VkCommandPoolObj *pool, VkCommandBufferLevel level) { 1368 m_device = device; 1369 auto create_info = vk_testing::CommandBuffer::create_info(pool->handle()); 1370 create_info.level = level; 1371 init(*device, create_info); 1372} 1373 1374void VkCommandBufferObj::PipelineBarrier(VkPipelineStageFlags src_stages, VkPipelineStageFlags dest_stages, 1375 VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, 1376 const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount, 1377 const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, 1378 const VkImageMemoryBarrier *pImageMemoryBarriers) { 1379 vkCmdPipelineBarrier(handle(), src_stages, dest_stages, dependencyFlags, memoryBarrierCount, pMemoryBarriers, 1380 bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers); 1381} 1382 1383void VkCommandBufferObj::ClearAllBuffers(const vector<VkImageObj *> &color_objs, VkClearColorValue clear_color, 1384 VkDepthStencilObj *depth_stencil_obj, float depth_clear_value, 1385 uint32_t stencil_clear_value) { 1386 // whatever we want to do, we do it to the whole buffer 1387 VkImageSubresourceRange subrange = {}; 1388 // srRange.aspectMask to be set later 1389 subrange.baseMipLevel = 0; 1390 // TODO: Mali device crashing with VK_REMAINING_MIP_LEVELS 1391 subrange.levelCount = 1; // VK_REMAINING_MIP_LEVELS; 1392 subrange.baseArrayLayer = 0; 1393 // TODO: Mesa crashing with VK_REMAINING_ARRAY_LAYERS 1394 subrange.layerCount = 1; // VK_REMAINING_ARRAY_LAYERS; 1395 1396 const VkImageLayout clear_layout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL; 1397 1398 for (const auto &color_obj : color_objs) { 1399 subrange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; 1400 color_obj->Layout(VK_IMAGE_LAYOUT_UNDEFINED); 1401 color_obj->SetLayout(this, subrange.aspectMask, clear_layout); 1402 ClearColorImage(color_obj->image(), clear_layout, &clear_color, 1, &subrange); 1403 } 1404 1405 if (depth_stencil_obj && depth_stencil_obj->Initialized()) { 1406 subrange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT; 1407 if (FormatIsDepthOnly(depth_stencil_obj->format())) subrange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT; 1408 if (FormatIsStencilOnly(depth_stencil_obj->format())) subrange.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT; 1409 1410 depth_stencil_obj->Layout(VK_IMAGE_LAYOUT_UNDEFINED); 1411 depth_stencil_obj->SetLayout(this, subrange.aspectMask, clear_layout); 1412 1413 VkClearDepthStencilValue clear_value = {depth_clear_value, stencil_clear_value}; 1414 ClearDepthStencilImage(depth_stencil_obj->handle(), clear_layout, &clear_value, 1, &subrange); 1415 } 1416} 1417 1418void VkCommandBufferObj::FillBuffer(VkBuffer buffer, VkDeviceSize offset, VkDeviceSize fill_size, uint32_t data) { 1419 vkCmdFillBuffer(handle(), buffer, offset, fill_size, data); 1420} 1421 1422void VkCommandBufferObj::UpdateBuffer(VkBuffer buffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) { 1423 vkCmdUpdateBuffer(handle(), buffer, dstOffset, dataSize, pData); 1424} 1425 1426void VkCommandBufferObj::CopyImage(VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, 1427 uint32_t regionCount, const VkImageCopy *pRegions) 
    vkCmdCopyImage(handle(), srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
}

void VkCommandBufferObj::ResolveImage(VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
                                      VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve *pRegions) {
    vkCmdResolveImage(handle(), srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
}

void VkCommandBufferObj::ClearColorImage(VkImage image, VkImageLayout imageLayout, const VkClearColorValue *pColor,
                                         uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    vkCmdClearColorImage(handle(), image, imageLayout, pColor, rangeCount, pRanges);
}

void VkCommandBufferObj::ClearDepthStencilImage(VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue *pColor,
                                                uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    vkCmdClearDepthStencilImage(handle(), image, imageLayout, pColor, rangeCount, pRanges);
}

void VkCommandBufferObj::PrepareAttachments(const vector<VkImageObj *> &color_atts, VkDepthStencilObj *depth_stencil_att) {
    for (const auto &color_att : color_atts) {
        color_att->SetLayout(this, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
    }

    if (depth_stencil_att && depth_stencil_att->Initialized()) {
        VkImageAspectFlags aspect = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
        if (FormatIsDepthOnly(depth_stencil_att->Format())) aspect = VK_IMAGE_ASPECT_DEPTH_BIT;
        if (FormatIsStencilOnly(depth_stencil_att->Format())) aspect = VK_IMAGE_ASPECT_STENCIL_BIT;

        depth_stencil_att->SetLayout(this, aspect, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);
    }
}

void VkCommandBufferObj::BeginRenderPass(const VkRenderPassBeginInfo &info) {
    vkCmdBeginRenderPass(handle(), &info, VK_SUBPASS_CONTENTS_INLINE);
}

void VkCommandBufferObj::EndRenderPass() { vkCmdEndRenderPass(handle()); }

void VkCommandBufferObj::SetViewport(uint32_t firstViewport, uint32_t viewportCount, const VkViewport *pViewports) {
    vkCmdSetViewport(handle(), firstViewport, viewportCount, pViewports);
}

void VkCommandBufferObj::SetStencilReference(VkStencilFaceFlags faceMask, uint32_t reference) {
    vkCmdSetStencilReference(handle(), faceMask, reference);
}

void VkCommandBufferObj::DrawIndexed(uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset,
                                     uint32_t firstInstance) {
    vkCmdDrawIndexed(handle(), indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
}

void VkCommandBufferObj::Draw(uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) {
    vkCmdDraw(handle(), vertexCount, instanceCount, firstVertex, firstInstance);
}

void VkCommandBufferObj::QueueCommandBuffer(bool checkSuccess) {
    VkFence nullFence = {VK_NULL_HANDLE};
    QueueCommandBuffer(nullFence, checkSuccess);
}

void VkCommandBufferObj::QueueCommandBuffer(VkFence fence, bool checkSuccess) {
    VkResult err = VK_SUCCESS;

    // submit the command buffer to the universal queue
    VkSubmitInfo submit_info;
    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submit_info.pNext = NULL;
    submit_info.waitSemaphoreCount = 0;
    submit_info.pWaitSemaphores = NULL;
    submit_info.pWaitDstStageMask = NULL;
    submit_info.commandBufferCount = 1;
    submit_info.pCommandBuffers = &handle();
    submit_info.signalSemaphoreCount = 0;
    submit_info.pSignalSemaphores = NULL;

    err = vkQueueSubmit(m_device->m_queue, 1, &submit_info, fence);
    if (checkSuccess) {
        ASSERT_VK_SUCCESS(err);
    }

    err = vkQueueWaitIdle(m_device->m_queue);
    if (checkSuccess) {
        ASSERT_VK_SUCCESS(err);
    }

    // Wait for work to finish before cleaning up.
    vkDeviceWaitIdle(m_device->device());
}

void VkCommandBufferObj::BindDescriptorSet(VkDescriptorSetObj &descriptorSet) {
    VkDescriptorSet set_obj = descriptorSet.GetDescriptorSetHandle();

    // bind the descriptor set at the graphics bind point, if one was allocated
    if (set_obj) {
        vkCmdBindDescriptorSets(handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, descriptorSet.GetPipelineLayout(), 0, 1, &set_obj, 0,
                                NULL);
    }
}

void VkCommandBufferObj::BindVertexBuffer(VkConstantBufferObj *vertexBuffer, VkDeviceSize offset, uint32_t binding) {
    vkCmdBindVertexBuffers(handle(), binding, 1, &vertexBuffer->handle(), &offset);
}

VkCommandPoolObj::VkCommandPoolObj(VkDeviceObj *device, uint32_t queue_family_index, VkCommandPoolCreateFlags flags) {
    init(*device, vk_testing::CommandPool::create_info(queue_family_index, flags));
}

bool VkDepthStencilObj::Initialized() { return m_initialized; }

VkDepthStencilObj::VkDepthStencilObj(VkDeviceObj *device) : VkImageObj(device) { m_initialized = false; }

VkImageView *VkDepthStencilObj::BindInfo() { return &m_attachmentBindInfo; }

VkFormat VkDepthStencilObj::Format() const { return this->m_depth_stencil_fmt; }

void VkDepthStencilObj::Init(VkDeviceObj *device, int32_t width, int32_t height, VkFormat format, VkImageUsageFlags usage) {
    VkImageViewCreateInfo view_info = {};

    m_device = device;
    m_initialized = true;
    m_depth_stencil_fmt = format;

    // create the image backing the depth/stencil attachment
    VkImageObj::Init(width, height, 1, m_depth_stencil_fmt, usage, VK_IMAGE_TILING_OPTIMAL);

    VkImageAspectFlags aspect = VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_DEPTH_BIT;
    if (FormatIsDepthOnly(format))
        aspect = VK_IMAGE_ASPECT_DEPTH_BIT;
    else if (FormatIsStencilOnly(format))
        aspect = VK_IMAGE_ASPECT_STENCIL_BIT;

    SetLayout(aspect, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);

    // create a 2D view covering the image's single mip level and array layer
    view_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
    view_info.pNext = NULL;
    view_info.subresourceRange.aspectMask = aspect;
    view_info.subresourceRange.baseMipLevel = 0;
    view_info.subresourceRange.levelCount = 1;
    view_info.subresourceRange.baseArrayLayer = 0;
    view_info.subresourceRange.layerCount = 1;
    view_info.flags = 0;
    view_info.format = m_depth_stencil_fmt;
    view_info.image = handle();
    view_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
    m_imageView.init(*m_device, view_info);

    m_attachmentBindInfo = m_imageView.handle();
}
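// ---------------------------------------------------------------------------------------------
// Illustrative usage sketch (not part of the framework itself): roughly how a test might drive
// the helpers above to clear a color target and a depth/stencil target, then submit the work.
// This sketch assumes begin()/end() inherited from vk_testing::CommandBuffer and a constructed
// VkDeviceObj named "device" with a populated graphics_queue_node_index_ member; sizes, formats,
// and clear values are arbitrary examples.
//
//     VkCommandPoolObj pool(&device, device.graphics_queue_node_index_, 0);
//     VkCommandBufferObj cmd(&device, &pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY);
//
//     VkImageObj color_target(&device);
//     color_target.Init(256, 256, 1, VK_FORMAT_R8G8B8A8_UNORM,
//                       VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
//                       VK_IMAGE_TILING_OPTIMAL);
//
//     VkDepthStencilObj depth_stencil(&device);
//     depth_stencil.Init(&device, 256, 256, VK_FORMAT_D24_UNORM_S8_UINT,
//                        VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT);
//
//     VkClearColorValue clear_color = {{0.25f, 0.25f, 0.25f, 0.0f}};
//     cmd.begin();
//     cmd.ClearAllBuffers({&color_target}, clear_color, &depth_stencil, 1.0f, 0);
//     cmd.end();
//     cmd.QueueCommandBuffer(true);
// ---------------------------------------------------------------------------------------------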