buffer_validation.cpp revision dab32891b91206a5bef7a3929b781e44fc1b7268
1/* Copyright (c) 2015-2017 The Khronos Group Inc. 2 * Copyright (c) 2015-2017 Valve Corporation 3 * Copyright (c) 2015-2017 LunarG, Inc. 4 * Copyright (C) 2015-2017 Google Inc. 5 * 6 * Licensed under the Apache License, Version 2.0 (the "License"); 7 * you may not use this file except in compliance with the License. 8 * You may obtain a copy of the License at 9 * 10 * http://www.apache.org/licenses/LICENSE-2.0 11 * 12 * Unless required by applicable law or agreed to in writing, software 13 * distributed under the License is distributed on an "AS IS" BASIS, 14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 * See the License for the specific language governing permissions and 16 * limitations under the License. 17 * 18 * Author: Mark Lobodzinski <mark@lunarg.com> 19 * Author: Dave Houlton <daveh@lunarg.com> 20 */ 21 22// Allow use of STL min and max functions in Windows 23#define NOMINMAX 24 25#include <inttypes.h> 26#include <sstream> 27#include <string> 28 29#include "vk_enum_string_helper.h" 30#include "vk_layer_data.h" 31#include "vk_layer_utils.h" 32#include "vk_layer_logging.h" 33 34#include "buffer_validation.h" 35 36// TODO: remove on NDK update (r15 will probably have proper STL impl) 37#ifdef __ANDROID__ 38namespace std { 39 40template <typename T> 41std::string to_string(T var) { 42 std::ostringstream ss; 43 ss << var; 44 return ss.str(); 45} 46} 47#endif 48 49void SetLayout(layer_data *device_data, GLOBAL_CB_NODE *pCB, ImageSubresourcePair imgpair, const VkImageLayout &layout) { 50 if (pCB->imageLayoutMap.find(imgpair) != pCB->imageLayoutMap.end()) { 51 pCB->imageLayoutMap[imgpair].layout = layout; 52 } else { 53 assert(imgpair.hasSubresource); 54 IMAGE_CMD_BUF_LAYOUT_NODE node; 55 if (!FindCmdBufLayout(device_data, pCB, imgpair.image, imgpair.subresource, node)) { 56 node.initialLayout = layout; 57 } 58 SetLayout(device_data, pCB, imgpair, {node.initialLayout, layout}); 59 } 60} 61template <class OBJECT, class LAYOUT> 
62void SetLayout(layer_data *device_data, OBJECT *pObject, VkImage image, VkImageSubresource range, const LAYOUT &layout) { 63 ImageSubresourcePair imgpair = {image, true, range}; 64 SetLayout(device_data, pObject, imgpair, layout, VK_IMAGE_ASPECT_COLOR_BIT); 65 SetLayout(device_data, pObject, imgpair, layout, VK_IMAGE_ASPECT_DEPTH_BIT); 66 SetLayout(device_data, pObject, imgpair, layout, VK_IMAGE_ASPECT_STENCIL_BIT); 67 SetLayout(device_data, pObject, imgpair, layout, VK_IMAGE_ASPECT_METADATA_BIT); 68} 69 70template <class OBJECT, class LAYOUT> 71void SetLayout(layer_data *device_data, OBJECT *pObject, ImageSubresourcePair imgpair, const LAYOUT &layout, 72 VkImageAspectFlags aspectMask) { 73 if (imgpair.subresource.aspectMask & aspectMask) { 74 imgpair.subresource.aspectMask = aspectMask; 75 SetLayout(device_data, pObject, imgpair, layout); 76 } 77} 78 79// Set the layout in supplied map 80void SetLayout(std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE> &imageLayoutMap, ImageSubresourcePair imgpair, 81 VkImageLayout layout) { 82 imageLayoutMap[imgpair].layout = layout; 83} 84 85bool FindLayoutVerifyNode(layer_data const *device_data, GLOBAL_CB_NODE const *pCB, ImageSubresourcePair imgpair, 86 IMAGE_CMD_BUF_LAYOUT_NODE &node, const VkImageAspectFlags aspectMask) { 87 const debug_report_data *report_data = core_validation::GetReportData(device_data); 88 89 if (!(imgpair.subresource.aspectMask & aspectMask)) { 90 return false; 91 } 92 VkImageAspectFlags oldAspectMask = imgpair.subresource.aspectMask; 93 imgpair.subresource.aspectMask = aspectMask; 94 auto imgsubIt = pCB->imageLayoutMap.find(imgpair); 95 if (imgsubIt == pCB->imageLayoutMap.end()) { 96 return false; 97 } 98 if (node.layout != VK_IMAGE_LAYOUT_MAX_ENUM && node.layout != imgsubIt->second.layout) { 99 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(imgpair.image), 100 __LINE__, DRAWSTATE_INVALID_LAYOUT, "DS", 101 "Cannot query for VkImage 
0x%" PRIx64 " layout when combined aspect mask %d has multiple layout types: %s and %s", 102 HandleToUint64(imgpair.image), oldAspectMask, string_VkImageLayout(node.layout), 103 string_VkImageLayout(imgsubIt->second.layout)); 104 } 105 if (node.initialLayout != VK_IMAGE_LAYOUT_MAX_ENUM && node.initialLayout != imgsubIt->second.initialLayout) { 106 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(imgpair.image), 107 __LINE__, DRAWSTATE_INVALID_LAYOUT, "DS", 108 "Cannot query for VkImage 0x%" PRIx64 109 " layout when combined aspect mask %d has multiple initial layout types: %s and %s", 110 HandleToUint64(imgpair.image), oldAspectMask, string_VkImageLayout(node.initialLayout), 111 string_VkImageLayout(imgsubIt->second.initialLayout)); 112 } 113 node = imgsubIt->second; 114 return true; 115} 116 117bool FindLayoutVerifyLayout(layer_data const *device_data, ImageSubresourcePair imgpair, VkImageLayout &layout, 118 const VkImageAspectFlags aspectMask) { 119 if (!(imgpair.subresource.aspectMask & aspectMask)) { 120 return false; 121 } 122 const debug_report_data *report_data = core_validation::GetReportData(device_data); 123 VkImageAspectFlags oldAspectMask = imgpair.subresource.aspectMask; 124 imgpair.subresource.aspectMask = aspectMask; 125 auto imgsubIt = (*core_validation::GetImageLayoutMap(device_data)).find(imgpair); 126 if (imgsubIt == (*core_validation::GetImageLayoutMap(device_data)).end()) { 127 return false; 128 } 129 if (layout != VK_IMAGE_LAYOUT_MAX_ENUM && layout != imgsubIt->second.layout) { 130 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(imgpair.image), 131 __LINE__, DRAWSTATE_INVALID_LAYOUT, "DS", 132 "Cannot query for VkImage 0x%" PRIx64 " layout when combined aspect mask %d has multiple layout types: %s and %s", 133 HandleToUint64(imgpair.image), oldAspectMask, string_VkImageLayout(layout), 134 
string_VkImageLayout(imgsubIt->second.layout)); 135 } 136 layout = imgsubIt->second.layout; 137 return true; 138} 139 140// Find layout(s) on the command buffer level 141bool FindCmdBufLayout(layer_data const *device_data, GLOBAL_CB_NODE const *pCB, VkImage image, VkImageSubresource range, 142 IMAGE_CMD_BUF_LAYOUT_NODE &node) { 143 ImageSubresourcePair imgpair = {image, true, range}; 144 node = IMAGE_CMD_BUF_LAYOUT_NODE(VK_IMAGE_LAYOUT_MAX_ENUM, VK_IMAGE_LAYOUT_MAX_ENUM); 145 FindLayoutVerifyNode(device_data, pCB, imgpair, node, VK_IMAGE_ASPECT_COLOR_BIT); 146 FindLayoutVerifyNode(device_data, pCB, imgpair, node, VK_IMAGE_ASPECT_DEPTH_BIT); 147 FindLayoutVerifyNode(device_data, pCB, imgpair, node, VK_IMAGE_ASPECT_STENCIL_BIT); 148 FindLayoutVerifyNode(device_data, pCB, imgpair, node, VK_IMAGE_ASPECT_METADATA_BIT); 149 if (node.layout == VK_IMAGE_LAYOUT_MAX_ENUM) { 150 imgpair = {image, false, VkImageSubresource()}; 151 auto imgsubIt = pCB->imageLayoutMap.find(imgpair); 152 if (imgsubIt == pCB->imageLayoutMap.end()) return false; 153 // TODO: This is ostensibly a find function but it changes state here 154 node = imgsubIt->second; 155 } 156 return true; 157} 158 159// Find layout(s) on the global level 160bool FindGlobalLayout(layer_data *device_data, ImageSubresourcePair imgpair, VkImageLayout &layout) { 161 layout = VK_IMAGE_LAYOUT_MAX_ENUM; 162 FindLayoutVerifyLayout(device_data, imgpair, layout, VK_IMAGE_ASPECT_COLOR_BIT); 163 FindLayoutVerifyLayout(device_data, imgpair, layout, VK_IMAGE_ASPECT_DEPTH_BIT); 164 FindLayoutVerifyLayout(device_data, imgpair, layout, VK_IMAGE_ASPECT_STENCIL_BIT); 165 FindLayoutVerifyLayout(device_data, imgpair, layout, VK_IMAGE_ASPECT_METADATA_BIT); 166 if (layout == VK_IMAGE_LAYOUT_MAX_ENUM) { 167 imgpair = {imgpair.image, false, VkImageSubresource()}; 168 auto imgsubIt = (*core_validation::GetImageLayoutMap(device_data)).find(imgpair); 169 if (imgsubIt == (*core_validation::GetImageLayoutMap(device_data)).end()) return false; 170 
layout = imgsubIt->second.layout; 171 } 172 return true; 173} 174 175bool FindLayouts(layer_data *device_data, VkImage image, std::vector<VkImageLayout> &layouts) { 176 auto sub_data = (*core_validation::GetImageSubresourceMap(device_data)).find(image); 177 if (sub_data == (*core_validation::GetImageSubresourceMap(device_data)).end()) return false; 178 auto image_state = GetImageState(device_data, image); 179 if (!image_state) return false; 180 bool ignoreGlobal = false; 181 // TODO: Make this robust for >1 aspect mask. Now it will just say ignore potential errors in this case. 182 if (sub_data->second.size() >= (image_state->createInfo.arrayLayers * image_state->createInfo.mipLevels + 1)) { 183 ignoreGlobal = true; 184 } 185 for (auto imgsubpair : sub_data->second) { 186 if (ignoreGlobal && !imgsubpair.hasSubresource) continue; 187 auto img_data = (*core_validation::GetImageLayoutMap(device_data)).find(imgsubpair); 188 if (img_data != (*core_validation::GetImageLayoutMap(device_data)).end()) { 189 layouts.push_back(img_data->second.layout); 190 } 191 } 192 return true; 193} 194bool FindLayout(const std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE> &imageLayoutMap, ImageSubresourcePair imgpair, 195 VkImageLayout &layout, const VkImageAspectFlags aspectMask) { 196 if (!(imgpair.subresource.aspectMask & aspectMask)) { 197 return false; 198 } 199 imgpair.subresource.aspectMask = aspectMask; 200 auto imgsubIt = imageLayoutMap.find(imgpair); 201 if (imgsubIt == imageLayoutMap.end()) { 202 return false; 203 } 204 layout = imgsubIt->second.layout; 205 return true; 206} 207 208// find layout in supplied map 209bool FindLayout(const std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE> &imageLayoutMap, ImageSubresourcePair imgpair, 210 VkImageLayout &layout) { 211 layout = VK_IMAGE_LAYOUT_MAX_ENUM; 212 FindLayout(imageLayoutMap, imgpair, layout, VK_IMAGE_ASPECT_COLOR_BIT); 213 FindLayout(imageLayoutMap, imgpair, layout, VK_IMAGE_ASPECT_DEPTH_BIT); 214 
FindLayout(imageLayoutMap, imgpair, layout, VK_IMAGE_ASPECT_STENCIL_BIT); 215 FindLayout(imageLayoutMap, imgpair, layout, VK_IMAGE_ASPECT_METADATA_BIT); 216 if (layout == VK_IMAGE_LAYOUT_MAX_ENUM) { 217 imgpair = {imgpair.image, false, VkImageSubresource()}; 218 auto imgsubIt = imageLayoutMap.find(imgpair); 219 if (imgsubIt == imageLayoutMap.end()) return false; 220 layout = imgsubIt->second.layout; 221 } 222 return true; 223} 224 225// Set the layout on the global level 226void SetGlobalLayout(layer_data *device_data, ImageSubresourcePair imgpair, const VkImageLayout &layout) { 227 VkImage &image = imgpair.image; 228 (*core_validation::GetImageLayoutMap(device_data))[imgpair].layout = layout; 229 auto &image_subresources = (*core_validation::GetImageSubresourceMap(device_data))[image]; 230 auto subresource = std::find(image_subresources.begin(), image_subresources.end(), imgpair); 231 if (subresource == image_subresources.end()) { 232 image_subresources.push_back(imgpair); 233 } 234} 235 236// Set the layout on the cmdbuf level 237void SetLayout(layer_data *device_data, GLOBAL_CB_NODE *pCB, ImageSubresourcePair imgpair, const IMAGE_CMD_BUF_LAYOUT_NODE &node) { 238 pCB->imageLayoutMap[imgpair] = node; 239} 240// Set image layout for given VkImageSubresourceRange struct 241void SetImageLayout(layer_data *device_data, GLOBAL_CB_NODE *cb_node, const IMAGE_STATE *image_state, 242 VkImageSubresourceRange image_subresource_range, const VkImageLayout &layout) { 243 assert(image_state); 244 for (uint32_t level_index = 0; level_index < image_subresource_range.levelCount; ++level_index) { 245 uint32_t level = image_subresource_range.baseMipLevel + level_index; 246 for (uint32_t layer_index = 0; layer_index < image_subresource_range.layerCount; layer_index++) { 247 uint32_t layer = image_subresource_range.baseArrayLayer + layer_index; 248 VkImageSubresource sub = {image_subresource_range.aspectMask, level, layer}; 249 // TODO: If ImageView was created with depth or stencil, 
transition both layouts as the aspectMask is ignored and both 250 // are used. Verify that the extra implicit layout is OK for descriptor set layout validation 251 if (image_subresource_range.aspectMask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) { 252 if (FormatIsDepthAndStencil(image_state->createInfo.format)) { 253 sub.aspectMask |= (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT); 254 } 255 } 256 SetLayout(device_data, cb_node, image_state->image, sub, layout); 257 } 258 } 259} 260// Set image layout for given VkImageSubresourceLayers struct 261void SetImageLayout(layer_data *device_data, GLOBAL_CB_NODE *cb_node, const IMAGE_STATE *image_state, 262 VkImageSubresourceLayers image_subresource_layers, const VkImageLayout &layout) { 263 // Transfer VkImageSubresourceLayers into VkImageSubresourceRange struct 264 VkImageSubresourceRange image_subresource_range; 265 image_subresource_range.aspectMask = image_subresource_layers.aspectMask; 266 image_subresource_range.baseArrayLayer = image_subresource_layers.baseArrayLayer; 267 image_subresource_range.layerCount = image_subresource_layers.layerCount; 268 image_subresource_range.baseMipLevel = image_subresource_layers.mipLevel; 269 image_subresource_range.levelCount = 1; 270 SetImageLayout(device_data, cb_node, image_state, image_subresource_range, layout); 271} 272// Set image layout for all slices of an image view 273void SetImageViewLayout(layer_data *device_data, GLOBAL_CB_NODE *cb_node, VkImageView imageView, const VkImageLayout &layout) { 274 auto view_state = GetImageViewState(device_data, imageView); 275 assert(view_state); 276 277 SetImageLayout(device_data, cb_node, GetImageState(device_data, view_state->create_info.image), 278 view_state->create_info.subresourceRange, layout); 279} 280 281bool VerifyFramebufferAndRenderPassLayouts(layer_data *device_data, GLOBAL_CB_NODE *pCB, 282 const VkRenderPassBeginInfo *pRenderPassBegin, 283 const FRAMEBUFFER_STATE *framebuffer_state) { 284 
bool skip = false; 285 auto const pRenderPassInfo = GetRenderPassState(device_data, pRenderPassBegin->renderPass)->createInfo.ptr(); 286 auto const &framebufferInfo = framebuffer_state->createInfo; 287 const auto report_data = core_validation::GetReportData(device_data); 288 if (pRenderPassInfo->attachmentCount != framebufferInfo.attachmentCount) { 289 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 290 HandleToUint64(pCB->commandBuffer), __LINE__, DRAWSTATE_INVALID_RENDERPASS, "DS", 291 "You cannot start a render pass using a framebuffer " 292 "with a different number of attachments."); 293 } 294 for (uint32_t i = 0; i < pRenderPassInfo->attachmentCount; ++i) { 295 const VkImageView &image_view = framebufferInfo.pAttachments[i]; 296 auto view_state = GetImageViewState(device_data, image_view); 297 assert(view_state); 298 const VkImage &image = view_state->create_info.image; 299 const VkImageSubresourceRange &subRange = view_state->create_info.subresourceRange; 300 auto initial_layout = pRenderPassInfo->pAttachments[i].initialLayout; 301 // TODO: Do not iterate over every possibility - consolidate where possible 302 for (uint32_t j = 0; j < subRange.levelCount; j++) { 303 uint32_t level = subRange.baseMipLevel + j; 304 for (uint32_t k = 0; k < subRange.layerCount; k++) { 305 uint32_t layer = subRange.baseArrayLayer + k; 306 VkImageSubresource sub = {subRange.aspectMask, level, layer}; 307 IMAGE_CMD_BUF_LAYOUT_NODE node; 308 if (!FindCmdBufLayout(device_data, pCB, image, sub, node)) { 309 // Missing layouts will be added during state update 310 continue; 311 } 312 if (initial_layout != VK_IMAGE_LAYOUT_UNDEFINED && initial_layout != node.layout) { 313 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, 314 __LINE__, DRAWSTATE_INVALID_RENDERPASS, "DS", 315 "You cannot start a render pass using attachment %u " 316 "where the render pass initial layout is %s 
and the previous " 317 "known layout of the attachment is %s. The layouts must match, or " 318 "the render pass initial layout for the attachment must be " 319 "VK_IMAGE_LAYOUT_UNDEFINED", 320 i, string_VkImageLayout(initial_layout), string_VkImageLayout(node.layout)); 321 } 322 } 323 } 324 } 325 return skip; 326} 327 328void TransitionAttachmentRefLayout(layer_data *device_data, GLOBAL_CB_NODE *pCB, FRAMEBUFFER_STATE *pFramebuffer, 329 VkAttachmentReference ref) { 330 if (ref.attachment != VK_ATTACHMENT_UNUSED) { 331 auto image_view = pFramebuffer->createInfo.pAttachments[ref.attachment]; 332 SetImageViewLayout(device_data, pCB, image_view, ref.layout); 333 } 334} 335 336void TransitionSubpassLayouts(layer_data *device_data, GLOBAL_CB_NODE *pCB, const RENDER_PASS_STATE *render_pass_state, 337 const int subpass_index, FRAMEBUFFER_STATE *framebuffer_state) { 338 assert(render_pass_state); 339 340 if (framebuffer_state) { 341 auto const &subpass = render_pass_state->createInfo.pSubpasses[subpass_index]; 342 for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) { 343 TransitionAttachmentRefLayout(device_data, pCB, framebuffer_state, subpass.pInputAttachments[j]); 344 } 345 for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) { 346 TransitionAttachmentRefLayout(device_data, pCB, framebuffer_state, subpass.pColorAttachments[j]); 347 } 348 if (subpass.pDepthStencilAttachment) { 349 TransitionAttachmentRefLayout(device_data, pCB, framebuffer_state, *subpass.pDepthStencilAttachment); 350 } 351 } 352} 353 354bool ValidateImageAspectLayout(layer_data *device_data, GLOBAL_CB_NODE *pCB, const VkImageMemoryBarrier *mem_barrier, 355 uint32_t level, uint32_t layer, VkImageAspectFlags aspect) { 356 if (!(mem_barrier->subresourceRange.aspectMask & aspect)) { 357 return false; 358 } 359 VkImageSubresource sub = {aspect, level, layer}; 360 IMAGE_CMD_BUF_LAYOUT_NODE node; 361 if (!FindCmdBufLayout(device_data, pCB, mem_barrier->image, sub, node)) { 362 return false; 363 } 
364 bool skip = false; 365 if (mem_barrier->oldLayout == VK_IMAGE_LAYOUT_UNDEFINED) { 366 // TODO: Set memory invalid which is in mem_tracker currently 367 } else if (node.layout != mem_barrier->oldLayout) { 368 skip |= 369 log_msg(core_validation::GetReportData(device_data), VK_DEBUG_REPORT_ERROR_BIT_EXT, 370 VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, HandleToUint64(pCB->commandBuffer), __LINE__, 371 DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS", 372 "For image 0x%" PRIxLEAST64 " you cannot transition the layout of aspect %d from %s when current layout is %s.", 373 HandleToUint64(mem_barrier->image), aspect, string_VkImageLayout(mem_barrier->oldLayout), 374 string_VkImageLayout(node.layout)); 375 } 376 return skip; 377} 378 379// Transition the layout state for renderpass attachments based on the BeginRenderPass() call. This includes: 380// 1. Transition into initialLayout state 381// 2. Transition from initialLayout to layout used in subpass 0 382void TransitionBeginRenderPassLayouts(layer_data *device_data, GLOBAL_CB_NODE *cb_state, const RENDER_PASS_STATE *render_pass_state, 383 FRAMEBUFFER_STATE *framebuffer_state) { 384 // First transition into initialLayout 385 auto const rpci = render_pass_state->createInfo.ptr(); 386 for (uint32_t i = 0; i < rpci->attachmentCount; ++i) { 387 VkImageView image_view = framebuffer_state->createInfo.pAttachments[i]; 388 SetImageViewLayout(device_data, cb_state, image_view, rpci->pAttachments[i].initialLayout); 389 } 390 // Now transition for first subpass (index 0) 391 TransitionSubpassLayouts(device_data, cb_state, render_pass_state, 0, framebuffer_state); 392} 393 394void TransitionImageAspectLayout(layer_data *device_data, GLOBAL_CB_NODE *pCB, const VkImageMemoryBarrier *mem_barrier, 395 uint32_t level, uint32_t layer, VkImageAspectFlags aspect) { 396 if (!(mem_barrier->subresourceRange.aspectMask & aspect)) { 397 return; 398 } 399 VkImageSubresource sub = {aspect, level, layer}; 400 IMAGE_CMD_BUF_LAYOUT_NODE node; 401 if 
(!FindCmdBufLayout(device_data, pCB, mem_barrier->image, sub, node)) { 402 SetLayout(device_data, pCB, mem_barrier->image, sub, 403 IMAGE_CMD_BUF_LAYOUT_NODE(mem_barrier->oldLayout, mem_barrier->newLayout)); 404 return; 405 } 406 if (mem_barrier->oldLayout == VK_IMAGE_LAYOUT_UNDEFINED) { 407 // TODO: Set memory invalid 408 } 409 SetLayout(device_data, pCB, mem_barrier->image, sub, mem_barrier->newLayout); 410} 411 412bool VerifyAspectsPresent(VkImageAspectFlags aspect_mask, VkFormat format) { 413 if ((aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) != 0) { 414 if (!FormatIsColor(format)) return false; 415 } 416 if ((aspect_mask & VK_IMAGE_ASPECT_DEPTH_BIT) != 0) { 417 if (!FormatHasDepth(format)) return false; 418 } 419 if ((aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT) != 0) { 420 if (!FormatHasStencil(format)) return false; 421 } 422 return true; 423} 424 425// Verify an ImageMemoryBarrier's old/new ImageLayouts are compatible with the Image's ImageUsageFlags. 426bool ValidateBarrierLayoutToImageUsage(layer_data *device_data, const VkImageMemoryBarrier *img_barrier, bool new_not_old, 427 VkImageUsageFlags usage_flags, const char *func_name) { 428 const auto report_data = core_validation::GetReportData(device_data); 429 bool skip = false; 430 const VkImageLayout layout = (new_not_old) ? 
img_barrier->newLayout : img_barrier->oldLayout; 431 UNIQUE_VALIDATION_ERROR_CODE msg_code = VALIDATION_ERROR_UNDEFINED; // sentinel value meaning "no error" 432 433 switch (layout) { 434 case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL: 435 if ((usage_flags & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) == 0) { 436 msg_code = VALIDATION_ERROR_0a000970; 437 } 438 break; 439 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL: 440 if ((usage_flags & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) == 0) { 441 msg_code = VALIDATION_ERROR_0a000972; 442 } 443 break; 444 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL: 445 if ((usage_flags & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) == 0) { 446 msg_code = VALIDATION_ERROR_0a000974; 447 } 448 break; 449 case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL: 450 if ((usage_flags & (VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT)) == 0) { 451 msg_code = VALIDATION_ERROR_0a000976; 452 } 453 break; 454 case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL: 455 if ((usage_flags & VK_IMAGE_USAGE_TRANSFER_SRC_BIT) == 0) { 456 msg_code = VALIDATION_ERROR_0a000978; 457 } 458 break; 459 case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL: 460 if ((usage_flags & VK_IMAGE_USAGE_TRANSFER_DST_BIT) == 0) { 461 msg_code = VALIDATION_ERROR_0a00097a; 462 } 463 break; 464 default: 465 // Other VkImageLayout values do not have VUs defined in this context. 466 break; 467 } 468 469 if (msg_code != VALIDATION_ERROR_UNDEFINED) { 470 skip |= 471 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 472 HandleToUint64(img_barrier->image), __LINE__, msg_code, "DS", 473 "%s: Image barrier 0x%p %sLayout=%s is not compatible with image 0x%" PRIx64 " usage flags 0x%" PRIx32 ". %s", 474 func_name, img_barrier, ((new_not_old) ? 
"new" : "old"), string_VkImageLayout(layout), 475 HandleToUint64(img_barrier->image), usage_flags, validation_error_map[msg_code]); 476 } 477 return skip; 478} 479 480// Verify image barriers are compatible with the images they reference. 481bool ValidateBarriersToImages(layer_data *device_data, VkCommandBuffer cmdBuffer, uint32_t imageMemoryBarrierCount, 482 const VkImageMemoryBarrier *pImageMemoryBarriers, const char *func_name) { 483 GLOBAL_CB_NODE *pCB = GetCBNode(device_data, cmdBuffer); 484 bool skip = false; 485 486 for (uint32_t i = 0; i < imageMemoryBarrierCount; ++i) { 487 auto img_barrier = &pImageMemoryBarriers[i]; 488 if (!img_barrier) continue; 489 490 auto image_state = GetImageState(device_data, img_barrier->image); 491 if (image_state) { 492 VkImageUsageFlags usage_flags = image_state->createInfo.usage; 493 skip |= ValidateBarrierLayoutToImageUsage(device_data, img_barrier, false, usage_flags, func_name); 494 skip |= ValidateBarrierLayoutToImageUsage(device_data, img_barrier, true, usage_flags, func_name); 495 496 // Make sure layout is able to be transitioned, currently only presented shared presentable images are locked 497 if (image_state->layout_locked) { 498 // TODO: Add unique id for error when available 499 skip |= log_msg(core_validation::GetReportData(device_data), VK_DEBUG_REPORT_ERROR_BIT_EXT, 500 VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 0, "DS", 501 "Attempting to transition shared presentable image 0x%" PRIxLEAST64 502 " from layout %s to layout %s, but image has already been presented and cannot have its layout transitioned.", 503 reinterpret_cast<const uint64_t &>(img_barrier->image), string_VkImageLayout(img_barrier->oldLayout), 504 string_VkImageLayout(img_barrier->newLayout)); 505 } 506 } 507 508 VkImageCreateInfo *image_create_info = &(GetImageState(device_data, img_barrier->image)->createInfo); 509 // For a Depth/Stencil image both aspects MUST be set 510 if (FormatIsDepthAndStencil(image_create_info->format)) { 511 
auto const aspect_mask = img_barrier->subresourceRange.aspectMask; 512 auto const ds_mask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT; 513 if ((aspect_mask & ds_mask) != (ds_mask)) { 514 skip |= 515 log_msg(core_validation::GetReportData(device_data), VK_DEBUG_REPORT_ERROR_BIT_EXT, 516 VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(img_barrier->image), __LINE__, 517 VALIDATION_ERROR_0a00096e, "DS", 518 "%s: Image barrier 0x%p references image 0x%" PRIx64 519 " of format %s that must have the depth and stencil aspects set, but its " 520 "aspectMask is 0x%" PRIx32 ". %s", 521 func_name, img_barrier, HandleToUint64(img_barrier->image), string_VkFormat(image_create_info->format), 522 aspect_mask, validation_error_map[VALIDATION_ERROR_0a00096e]); 523 } 524 } 525 uint32_t level_count = ResolveRemainingLevels(&img_barrier->subresourceRange, image_create_info->mipLevels); 526 uint32_t layer_count = ResolveRemainingLayers(&img_barrier->subresourceRange, image_create_info->arrayLayers); 527 528 for (uint32_t j = 0; j < level_count; j++) { 529 uint32_t level = img_barrier->subresourceRange.baseMipLevel + j; 530 for (uint32_t k = 0; k < layer_count; k++) { 531 uint32_t layer = img_barrier->subresourceRange.baseArrayLayer + k; 532 skip |= ValidateImageAspectLayout(device_data, pCB, img_barrier, level, layer, VK_IMAGE_ASPECT_COLOR_BIT); 533 skip |= ValidateImageAspectLayout(device_data, pCB, img_barrier, level, layer, VK_IMAGE_ASPECT_DEPTH_BIT); 534 skip |= ValidateImageAspectLayout(device_data, pCB, img_barrier, level, layer, VK_IMAGE_ASPECT_STENCIL_BIT); 535 skip |= ValidateImageAspectLayout(device_data, pCB, img_barrier, level, layer, VK_IMAGE_ASPECT_METADATA_BIT); 536 } 537 } 538 } 539 return skip; 540} 541 542void TransitionImageLayouts(layer_data *device_data, VkCommandBuffer cmdBuffer, uint32_t memBarrierCount, 543 const VkImageMemoryBarrier *pImgMemBarriers) { 544 GLOBAL_CB_NODE *pCB = GetCBNode(device_data, cmdBuffer); 545 546 for (uint32_t i = 0; i 
< memBarrierCount; ++i) { 547 auto mem_barrier = &pImgMemBarriers[i]; 548 if (!mem_barrier) continue; 549 550 VkImageCreateInfo *image_create_info = &(GetImageState(device_data, mem_barrier->image)->createInfo); 551 uint32_t level_count = ResolveRemainingLevels(&mem_barrier->subresourceRange, image_create_info->mipLevels); 552 uint32_t layer_count = ResolveRemainingLayers(&mem_barrier->subresourceRange, image_create_info->arrayLayers); 553 554 for (uint32_t j = 0; j < level_count; j++) { 555 uint32_t level = mem_barrier->subresourceRange.baseMipLevel + j; 556 for (uint32_t k = 0; k < layer_count; k++) { 557 uint32_t layer = mem_barrier->subresourceRange.baseArrayLayer + k; 558 TransitionImageAspectLayout(device_data, pCB, mem_barrier, level, layer, VK_IMAGE_ASPECT_COLOR_BIT); 559 TransitionImageAspectLayout(device_data, pCB, mem_barrier, level, layer, VK_IMAGE_ASPECT_DEPTH_BIT); 560 TransitionImageAspectLayout(device_data, pCB, mem_barrier, level, layer, VK_IMAGE_ASPECT_STENCIL_BIT); 561 TransitionImageAspectLayout(device_data, pCB, mem_barrier, level, layer, VK_IMAGE_ASPECT_METADATA_BIT); 562 } 563 } 564 } 565} 566 567bool VerifyImageLayout(layer_data const *device_data, GLOBAL_CB_NODE const *cb_node, IMAGE_STATE *image_state, 568 VkImageSubresourceLayers subLayers, VkImageLayout explicit_layout, VkImageLayout optimal_layout, 569 const char *caller, UNIQUE_VALIDATION_ERROR_CODE msg_code, bool *error) { 570 const auto report_data = core_validation::GetReportData(device_data); 571 const auto image = image_state->image; 572 bool skip = false; 573 574 for (uint32_t i = 0; i < subLayers.layerCount; ++i) { 575 uint32_t layer = i + subLayers.baseArrayLayer; 576 VkImageSubresource sub = {subLayers.aspectMask, subLayers.mipLevel, layer}; 577 IMAGE_CMD_BUF_LAYOUT_NODE node; 578 if (FindCmdBufLayout(device_data, cb_node, image, sub, node)) { 579 if (node.layout != explicit_layout) { 580 *error = true; 581 // TODO: Improve log message in the next pass 582 skip |= 
log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 583 HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS", 584 "%s: Cannot use image 0x%" PRIxLEAST64 585 " with specific layout %s that doesn't match the actual current layout %s.", 586 caller, HandleToUint64(image), string_VkImageLayout(explicit_layout), 587 string_VkImageLayout(node.layout)); 588 } 589 } 590 } 591 // If optimal_layout is not UNDEFINED, check that layout matches optimal for this case 592 if ((VK_IMAGE_LAYOUT_UNDEFINED != optimal_layout) && (explicit_layout != optimal_layout)) { 593 if (VK_IMAGE_LAYOUT_GENERAL == explicit_layout) { 594 if (image_state->createInfo.tiling != VK_IMAGE_TILING_LINEAR) { 595 // LAYOUT_GENERAL is allowed, but may not be performance optimal, flag as perf warning. 596 skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, 597 VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, HandleToUint64(cb_node->commandBuffer), __LINE__, 598 DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS", 599 "%s: For optimal performance image 0x%" PRIxLEAST64 " layout should be %s instead of GENERAL.", 600 caller, HandleToUint64(image), string_VkImageLayout(optimal_layout)); 601 } 602 } else if (GetDeviceExtensions(device_data)->vk_khr_shared_presentable_image) { 603 if (image_state->shared_presentable) { 604 if (VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR != explicit_layout) { 605 // TODO: Add unique error id when available. 
                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                    __LINE__, msg_code, "DS",
                                    "Layout for shared presentable image is %s but must be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR.",
                                    string_VkImageLayout(optimal_layout));
                }
            }
        } else {
            *error = true;
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(cb_node->commandBuffer), __LINE__, msg_code, "DS",
                            "%s: Layout for image 0x%" PRIxLEAST64 " is %s but can only be %s or VK_IMAGE_LAYOUT_GENERAL. %s",
                            caller, HandleToUint64(image), string_VkImageLayout(explicit_layout),
                            string_VkImageLayout(optimal_layout), validation_error_map[msg_code]);
        }
    }
    return skip;
}

// At render pass end, record each framebuffer attachment's layout as the finalLayout declared by the render pass.
void TransitionFinalSubpassLayouts(layer_data *device_data, GLOBAL_CB_NODE *pCB, const VkRenderPassBeginInfo *pRenderPassBegin,
                                   FRAMEBUFFER_STATE *framebuffer_state) {
    auto renderPass = GetRenderPassState(device_data, pRenderPassBegin->renderPass);
    if (!renderPass) return;

    const VkRenderPassCreateInfo *pRenderPassInfo = renderPass->createInfo.ptr();
    if (framebuffer_state) {
        for (uint32_t i = 0; i < pRenderPassInfo->attachmentCount; ++i) {
            auto image_view = framebuffer_state->createInfo.pAttachments[i];
            SetImageViewLayout(device_data, pCB, image_view, pRenderPassInfo->pAttachments[i].finalLayout);
        }
    }
}

// Validate a VkImageCreateInfo against format properties and device limits before image creation.
// Returns true ("skip") if a validation error should cause the call to be skipped.
bool PreCallValidateCreateImage(layer_data *device_data, const VkImageCreateInfo *pCreateInfo,
                                const VkAllocationCallbacks *pAllocator, VkImage *pImage) {
    bool skip = false;
    const debug_report_data *report_data = core_validation::GetReportData(device_data);

    if (pCreateInfo->format == VK_FORMAT_UNDEFINED) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                        VALIDATION_ERROR_09e0075e, "IMAGE", "vkCreateImage: VkFormat for image must not be VK_FORMAT_UNDEFINED. %s",
                        validation_error_map[VALIDATION_ERROR_09e0075e]);

        // Early return: the remaining checks query format properties and are meaningless for UNDEFINED.
        return skip;
    }

    const VkFormatProperties *properties = GetFormatProperties(device_data, pCreateInfo->format);

    if ((pCreateInfo->tiling == VK_IMAGE_TILING_LINEAR) && (properties->linearTilingFeatures == 0)) {
        std::stringstream ss;
        ss << "vkCreateImage format parameter (" << string_VkFormat(pCreateInfo->format) << ") is an unsupported format";
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                        VALIDATION_ERROR_09e007a2, "IMAGE", "%s. %s", ss.str().c_str(),
                        validation_error_map[VALIDATION_ERROR_09e007a2]);

        // Early return: an entirely unsupported format makes the subsequent feature-bit checks redundant.
        return skip;
    }

    if ((pCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL) && (properties->optimalTilingFeatures == 0)) {
        std::stringstream ss;
        ss << "vkCreateImage format parameter (" << string_VkFormat(pCreateInfo->format) << ") is an unsupported format";
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                        VALIDATION_ERROR_09e007ac, "IMAGE", "%s. %s", ss.str().c_str(),
                        validation_error_map[VALIDATION_ERROR_09e007ac]);

        return skip;
    }

    // Validate that format supports usage as color attachment
    if (pCreateInfo->usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) {
        if ((pCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL) &&
            ((properties->optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) == 0)) {
            std::stringstream ss;
            ss << "vkCreateImage: VkFormat for TILING_OPTIMAL image (" << string_VkFormat(pCreateInfo->format)
               << ") does not support requested Image usage type VK_IMAGE_USAGE_COLOR_ATTACHMENT";
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                            VALIDATION_ERROR_09e007b2, "IMAGE", "%s. %s", ss.str().c_str(),
                            validation_error_map[VALIDATION_ERROR_09e007b2]);
        }
        if ((pCreateInfo->tiling == VK_IMAGE_TILING_LINEAR) &&
            ((properties->linearTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) == 0)) {
            std::stringstream ss;
            ss << "vkCreateImage: VkFormat for TILING_LINEAR image (" << string_VkFormat(pCreateInfo->format)
               << ") does not support requested Image usage type VK_IMAGE_USAGE_COLOR_ATTACHMENT";
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                            VALIDATION_ERROR_09e007a8, "IMAGE", "%s. %s", ss.str().c_str(),
                            validation_error_map[VALIDATION_ERROR_09e007a8]);
        }
    }

    // Validate that format supports usage as depth/stencil attachment
    if (pCreateInfo->usage & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
        if ((pCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL) &&
            ((properties->optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) == 0)) {
            std::stringstream ss;
            ss << "vkCreateImage: VkFormat for TILING_OPTIMAL image (" << string_VkFormat(pCreateInfo->format)
               << ") does not support requested Image usage type VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT";
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                            VALIDATION_ERROR_09e007b4, "IMAGE", "%s. %s", ss.str().c_str(),
                            validation_error_map[VALIDATION_ERROR_09e007b4]);
        }
        if ((pCreateInfo->tiling == VK_IMAGE_TILING_LINEAR) &&
            ((properties->linearTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) == 0)) {
            std::stringstream ss;
            ss << "vkCreateImage: VkFormat for TILING_LINEAR image (" << string_VkFormat(pCreateInfo->format)
               << ") does not support requested Image usage type VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT";
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                            VALIDATION_ERROR_09e007aa, "IMAGE", "%s. %s", ss.str().c_str(),
                            validation_error_map[VALIDATION_ERROR_09e007aa]);
        }
    }

    const VkImageFormatProperties *ImageFormatProperties = GetImageFormatProperties(
        device_data, pCreateInfo->format, pCreateInfo->imageType, pCreateInfo->tiling, pCreateInfo->usage, pCreateInfo->flags);

    // A granularity of 1 means no alignment rounding is needed, so treat it as 0 for the mask arithmetic below.
    VkDeviceSize imageGranularity = GetPhysicalDeviceProperties(device_data)->limits.bufferImageGranularity;
    imageGranularity = imageGranularity == 1 ? 0 : imageGranularity;
    // TODO : This is also covering 2918 & 2919. Break out into separate checks
    // NOTE(review): extent fields are uint32_t, so "<= 0" behaves as "== 0" here.
    if ((pCreateInfo->extent.width <= 0) || (pCreateInfo->extent.height <= 0) || (pCreateInfo->extent.depth <= 0)) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 0, __LINE__,
                        VALIDATION_ERROR_09e007b8, "Image",
                        "CreateImage extent is 0 for at least one required dimension for image: "
                        "Width = %d Height = %d Depth = %d. %s",
                        pCreateInfo->extent.width, pCreateInfo->extent.height, pCreateInfo->extent.depth,
                        validation_error_map[VALIDATION_ERROR_09e007b8]);
    }

    // TODO: VALIDATION_ERROR_09e00770 VALIDATION_ERROR_09e00772 VALIDATION_ERROR_09e00776 VALIDATION_ERROR_09e0076e
    // All these extent-related VUs should be checked here
    if ((pCreateInfo->extent.depth > ImageFormatProperties->maxExtent.depth) ||
        (pCreateInfo->extent.width > ImageFormatProperties->maxExtent.width) ||
        (pCreateInfo->extent.height > ImageFormatProperties->maxExtent.height)) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 0, __LINE__,
                        IMAGE_INVALID_FORMAT_LIMITS_VIOLATION, "Image",
                        "CreateImage extents exceed allowable limits for format: "
                        "Width = %d Height = %d Depth = %d: Limits for Width = %d Height = %d Depth = %d for format %s.",
                        pCreateInfo->extent.width, pCreateInfo->extent.height, pCreateInfo->extent.depth,
                        ImageFormatProperties->maxExtent.width, ImageFormatProperties->maxExtent.height,
                        ImageFormatProperties->maxExtent.depth, string_VkFormat(pCreateInfo->format));
    }

    // Rough upper-bound of total image size, rounded up to bufferImageGranularity.
    // NOTE(review): relies on VkSampleCountFlagBits enum values being numerically equal to the sample count — confirm.
    uint64_t totalSize =
        ((uint64_t)pCreateInfo->extent.width * (uint64_t)pCreateInfo->extent.height * (uint64_t)pCreateInfo->extent.depth *
             (uint64_t)pCreateInfo->arrayLayers * (uint64_t)pCreateInfo->samples * (uint64_t)FormatSize(pCreateInfo->format) +
         (uint64_t)imageGranularity) &
        ~(uint64_t)imageGranularity;

    if (totalSize > ImageFormatProperties->maxResourceSize) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 0, __LINE__,
                        IMAGE_INVALID_FORMAT_LIMITS_VIOLATION, "Image",
                        "CreateImage resource size exceeds allowable maximum "
                        "Image resource size = 0x%" PRIxLEAST64 ", maximum resource size = 0x%" PRIxLEAST64 " ",
                        totalSize, ImageFormatProperties->maxResourceSize);
    }

    // TODO: VALIDATION_ERROR_09e0077e
    if (pCreateInfo->mipLevels > ImageFormatProperties->maxMipLevels) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 0, __LINE__,
                        IMAGE_INVALID_FORMAT_LIMITS_VIOLATION, "Image",
                        "CreateImage mipLevels=%d exceeds allowable maximum supported by format of %d", pCreateInfo->mipLevels,
                        ImageFormatProperties->maxMipLevels);
    }

    if (pCreateInfo->arrayLayers > ImageFormatProperties->maxArrayLayers) {
        skip |=
            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 0, __LINE__,
                    VALIDATION_ERROR_09e00780, "Image",
                    "CreateImage arrayLayers=%d exceeds allowable maximum supported by format of %d. %s", pCreateInfo->arrayLayers,
                    ImageFormatProperties->maxArrayLayers, validation_error_map[VALIDATION_ERROR_09e00780]);
    }

    if ((pCreateInfo->samples & ImageFormatProperties->sampleCounts) == 0) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 0, __LINE__,
                        VALIDATION_ERROR_09e0078e, "Image", "CreateImage samples %s is not supported by format 0x%.8X. %s",
                        string_VkSampleCountFlagBits(pCreateInfo->samples), ImageFormatProperties->sampleCounts,
                        validation_error_map[VALIDATION_ERROR_09e0078e]);
    }

    if (pCreateInfo->initialLayout != VK_IMAGE_LAYOUT_UNDEFINED && pCreateInfo->initialLayout != VK_IMAGE_LAYOUT_PREINITIALIZED) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 0, __LINE__,
                        VALIDATION_ERROR_09e0b801, "Image",
                        "vkCreateImage parameter, pCreateInfo->initialLayout, must be VK_IMAGE_LAYOUT_UNDEFINED or "
                        "VK_IMAGE_LAYOUT_PREINITIALIZED. %s",
                        validation_error_map[VALIDATION_ERROR_09e0b801]);
    }

    if ((pCreateInfo->flags & VK_IMAGE_CREATE_SPARSE_BINDING_BIT) && (!GetEnabledFeatures(device_data)->sparseBinding)) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                        VALIDATION_ERROR_09e00792, "DS",
                        "vkCreateImage(): the sparseBinding device feature is disabled: Images cannot be created with the "
                        "VK_IMAGE_CREATE_SPARSE_BINDING_BIT set. %s",
                        validation_error_map[VALIDATION_ERROR_09e00792]);
    }

    if ((pCreateInfo->flags & VK_IMAGE_CREATE_SPARSE_ALIASED_BIT) && (!GetEnabledFeatures(device_data)->sparseResidencyAliased)) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                        DRAWSTATE_INVALID_FEATURE, "DS",
                        "vkCreateImage(): the sparseResidencyAliased device feature is disabled: Images cannot be created with the "
                        "VK_IMAGE_CREATE_SPARSE_ALIASED_BIT set.");
    }

    return skip;
}

// Record layer state for a newly created image: IMAGE_STATE entry plus initial-layout tracking entries.
void PostCallRecordCreateImage(layer_data *device_data, const VkImageCreateInfo *pCreateInfo, VkImage *pImage) {
    IMAGE_LAYOUT_NODE image_state;
    image_state.layout = pCreateInfo->initialLayout;
    image_state.format = pCreateInfo->format;
    GetImageMap(device_data)->insert(std::make_pair(*pImage, std::unique_ptr<IMAGE_STATE>(new IMAGE_STATE(*pImage, pCreateInfo))));
    // A "whole image" subresource pair (hasSubresource == false) seeds the layout map.
    ImageSubresourcePair subpair{*pImage, false, VkImageSubresource()};
    (*core_validation::GetImageSubresourceMap(device_data))[*pImage].push_back(subpair);
    (*core_validation::GetImageLayoutMap(device_data))[subpair] = image_state;
}

// Validate image destruction; outputs the image's state and a VK_OBJECT wrapper for the caller's Record step.
bool PreCallValidateDestroyImage(layer_data *device_data, VkImage image, IMAGE_STATE **image_state, VK_OBJECT *obj_struct) {
    const CHECK_DISABLED *disabled = core_validation::GetDisables(device_data);
    *image_state = core_validation::GetImageState(device_data, image);
    *obj_struct = {HandleToUint64(image), kVulkanObjectTypeImage};
    if (disabled->destroy_image) return false;
    bool skip = false;
    if (*image_state) {
        // Error if the image is still referenced by in-flight command buffers.
        skip |= core_validation::ValidateObjectNotInUse(device_data, *image_state, *obj_struct, VALIDATION_ERROR_252007d0);
    }
    return skip;
}

// Remove all layer-state tracking for a destroyed image: cb bindings, memory ranges, and layout maps.
void PostCallRecordDestroyImage(layer_data *device_data, VkImage image, IMAGE_STATE *image_state, VK_OBJECT obj_struct) {
    core_validation::invalidateCommandBuffers(device_data, image_state->cb_bindings, obj_struct);
    // Clean up memory mapping, bindings and range references for image
    for (auto mem_binding : image_state->GetBoundMemory()) {
        auto mem_info = core_validation::GetMemObjInfo(device_data, mem_binding);
        if (mem_info) {
            core_validation::RemoveImageMemoryRange(obj_struct.handle, mem_info);
        }
    }
    core_validation::ClearMemoryObjectBindings(device_data, obj_struct.handle, kVulkanObjectTypeImage);
    // Remove image from imageMap
    core_validation::GetImageMap(device_data)->erase(image);
    std::unordered_map<VkImage, std::vector<ImageSubresourcePair>> *imageSubresourceMap =
        core_validation::GetImageSubresourceMap(device_data);

    const auto &sub_entry = imageSubresourceMap->find(image);
    if (sub_entry != imageSubresourceMap->end()) {
        for (const auto &pair : sub_entry->second) {
            core_validation::GetImageLayoutMap(device_data)->erase(pair);
        }
        imageSubresourceMap->erase(sub_entry);
    }
}

// Validate that an image's aspect, format, and usage are compatible with vkCmdClearColorImage.
bool ValidateImageAttributes(layer_data *device_data, IMAGE_STATE *image_state, VkImageSubresourceRange range) {
    bool skip = false;
    const debug_report_data *report_data = core_validation::GetReportData(device_data);

    if (range.aspectMask != VK_IMAGE_ASPECT_COLOR_BIT) {
        char const str[] = "vkCmdClearColorImage aspectMasks for all subresource ranges must be set to VK_IMAGE_ASPECT_COLOR_BIT";
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
                        HandleToUint64(image_state->image), __LINE__, DRAWSTATE_INVALID_IMAGE_ASPECT, "IMAGE", str);
    }

    if (FormatIsDepthOrStencil(image_state->createInfo.format)) {
        char const str[] = "vkCmdClearColorImage called with depth/stencil image.";
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
                        HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_1880000e, "IMAGE", "%s. %s", str,
                        validation_error_map[VALIDATION_ERROR_1880000e]);
    } else if (FormatIsCompressed(image_state->createInfo.format)) {
        char const str[] = "vkCmdClearColorImage called with compressed image.";
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
                        HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_1880000e, "IMAGE", "%s. %s", str,
                        validation_error_map[VALIDATION_ERROR_1880000e]);
    }

    if (!(image_state->createInfo.usage & VK_IMAGE_USAGE_TRANSFER_DST_BIT)) {
        char const str[] = "vkCmdClearColorImage called with image created without VK_IMAGE_USAGE_TRANSFER_DST_BIT.";
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
                        HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_18800004, "IMAGE", "%s. %s", str,
                        validation_error_map[VALIDATION_ERROR_18800004]);
    }
    return skip;
}

uint32_t ResolveRemainingLevels(const VkImageSubresourceRange *range, uint32_t mip_levels) {
    // Return correct number of mip levels taking into account VK_REMAINING_MIP_LEVELS
    uint32_t mip_level_count = range->levelCount;
    if (range->levelCount == VK_REMAINING_MIP_LEVELS) {
        mip_level_count = mip_levels - range->baseMipLevel;
    }
    return mip_level_count;
}

uint32_t ResolveRemainingLayers(const VkImageSubresourceRange *range, uint32_t layers) {
    // Return correct number of layers taking into account VK_REMAINING_ARRAY_LAYERS
    uint32_t array_layer_count = range->layerCount;
    if (range->layerCount == VK_REMAINING_ARRAY_LAYERS) {
        array_layer_count = layers - range->baseArrayLayer;
    }
    return array_layer_count;
}

// Validate that dest_image_layout is legal for a clear command, and that it matches any layout already
// recorded for the affected subresources in this command buffer.
bool VerifyClearImageLayout(layer_data *device_data, GLOBAL_CB_NODE *cb_node, IMAGE_STATE *image_state,
                            VkImageSubresourceRange range, VkImageLayout dest_image_layout, const char *func_name) {
    bool skip = false;
    const debug_report_data *report_data = core_validation::GetReportData(device_data);

    uint32_t level_count = ResolveRemainingLevels(&range, image_state->createInfo.mipLevels);
    uint32_t layer_count = ResolveRemainingLayers(&range, image_state->createInfo.arrayLayers);

    if (dest_image_layout != VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) {
        if (dest_image_layout == VK_IMAGE_LAYOUT_GENERAL) {
            if (image_state->createInfo.tiling != VK_IMAGE_TILING_LINEAR) {
                // LAYOUT_GENERAL is allowed, but may not be performance optimal, flag as perf warning.
                skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
                                HandleToUint64(image_state->image), __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
                                "%s: Layout for cleared image should be TRANSFER_DST_OPTIMAL instead of GENERAL.", func_name);
            }
        } else if (VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR == dest_image_layout) {
            if (!GetDeviceExtensions(device_data)->vk_khr_shared_presentable_image) {
                // TODO: Add unique error id when available.
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
                                HandleToUint64(image_state->image), __LINE__, 0, "DS",
                                "Must enable VK_KHR_shared_presentable_image extension before creating images with a layout type "
                                "of VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR.");

            } else {
                if (image_state->shared_presentable) {
                    skip |= log_msg(
                        report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
                        HandleToUint64(image_state->image), __LINE__, 0, "DS",
                        "Layout for shared presentable cleared image is %s but can only be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR.",
                        string_VkImageLayout(dest_image_layout));
                }
            }
        } else {
            // Pick the VU id that matches the calling clear command.
            UNIQUE_VALIDATION_ERROR_CODE error_code = VALIDATION_ERROR_1880000a;
            if (strcmp(func_name, "vkCmdClearDepthStencilImage()") == 0) {
                error_code = VALIDATION_ERROR_18a00018;
            } else {
                assert(strcmp(func_name, "vkCmdClearColorImage()") == 0);
            }
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
                            HandleToUint64(image_state->image), __LINE__, error_code, "DS",
                            "%s: Layout for cleared image is %s but can only be "
                            "TRANSFER_DST_OPTIMAL or GENERAL. %s",
                            func_name, string_VkImageLayout(dest_image_layout), validation_error_map[error_code]);
        }
    }

    // Cross-check the declared clear layout against any layout already recorded for each affected subresource.
    for (uint32_t level_index = 0; level_index < level_count; ++level_index) {
        uint32_t level = level_index + range.baseMipLevel;
        for (uint32_t layer_index = 0; layer_index < layer_count; ++layer_index) {
            uint32_t layer = layer_index + range.baseArrayLayer;
            VkImageSubresource sub = {range.aspectMask, level, layer};
            IMAGE_CMD_BUF_LAYOUT_NODE node;
            if (FindCmdBufLayout(device_data, cb_node, image_state->image, sub, node)) {
                if (node.layout != dest_image_layout) {
                    UNIQUE_VALIDATION_ERROR_CODE error_code = VALIDATION_ERROR_18800008;
                    if (strcmp(func_name, "vkCmdClearDepthStencilImage()") == 0) {
                        error_code = VALIDATION_ERROR_18a00016;
                    } else {
                        assert(strcmp(func_name, "vkCmdClearColorImage()") == 0);
                    }
                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0,
                                    __LINE__, error_code, "DS",
                                    "%s: Cannot clear an image whose layout is %s and "
                                    "doesn't match the current layout %s. %s",
                                    func_name, string_VkImageLayout(dest_image_layout), string_VkImageLayout(node.layout),
                                    validation_error_map[error_code]);
                }
            }
        }
    }

    return skip;
}

// Record dest_image_layout for every subresource in range that has no layout recorded yet in this command buffer.
void RecordClearImageLayout(layer_data *device_data, GLOBAL_CB_NODE *cb_node, VkImage image, VkImageSubresourceRange range,
                            VkImageLayout dest_image_layout) {
    VkImageCreateInfo *image_create_info = &(GetImageState(device_data, image)->createInfo);
    uint32_t level_count = ResolveRemainingLevels(&range, image_create_info->mipLevels);
    uint32_t layer_count = ResolveRemainingLayers(&range, image_create_info->arrayLayers);

    for (uint32_t level_index = 0; level_index < level_count; ++level_index) {
        uint32_t level = level_index + range.baseMipLevel;
        for (uint32_t layer_index = 0; layer_index < layer_count; ++layer_index) {
            uint32_t layer = layer_index + range.baseArrayLayer;
            VkImageSubresource sub = {range.aspectMask, level, layer};
            IMAGE_CMD_BUF_LAYOUT_NODE node;
            // Only seed a layout when none exists; an existing entry was already validated by VerifyClearImageLayout.
            if (!FindCmdBufLayout(device_data, cb_node, image, sub, node)) {
                SetLayout(device_data, cb_node, image, sub, IMAGE_CMD_BUF_LAYOUT_NODE(dest_image_layout, dest_image_layout));
            }
        }
    }
}

// Validate vkCmdClearColorImage: bound memory, queue capabilities, command state, and per-range attributes/layout.
bool PreCallValidateCmdClearColorImage(layer_data *dev_data, VkCommandBuffer commandBuffer, VkImage image,
                                       VkImageLayout imageLayout, uint32_t rangeCount, const VkImageSubresourceRange *pRanges) {
    bool skip = false;
    // TODO : Verify memory is in VK_IMAGE_STATE_CLEAR state
    auto cb_node = GetCBNode(dev_data, commandBuffer);
    auto image_state = GetImageState(dev_data, image);
    if (cb_node && image_state) {
        skip |= ValidateMemoryIsBoundToImage(dev_data, image_state, "vkCmdClearColorImage()", VALIDATION_ERROR_18800006);
        skip |= ValidateCmdQueueFlags(dev_data, cb_node, "vkCmdClearColorImage()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
                                      VALIDATION_ERROR_18802415);
        skip |= ValidateCmd(dev_data, cb_node, CMD_CLEARCOLORIMAGE, "vkCmdClearColorImage()");
        skip |= insideRenderPass(dev_data, cb_node, "vkCmdClearColorImage()", VALIDATION_ERROR_18800017);
        for (uint32_t i = 0; i < rangeCount; ++i) {
            std::string param_name = "pRanges[" + std::to_string(i) + "]";
            skip |=
                ValidateImageSubresourceRange(dev_data, image_state, false, pRanges[i], "vkCmdClearColorImage", param_name.c_str());
            skip |= ValidateImageAttributes(dev_data, image_state, pRanges[i]);
            skip |= VerifyClearImageLayout(dev_data, cb_node, image_state, pRanges[i], imageLayout, "vkCmdClearColorImage()");
        }
    }
    return skip;
}

// This state recording routine is shared between ClearColorImage and ClearDepthStencilImage
void PreCallRecordCmdClearImage(layer_data *dev_data, VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
                                uint32_t rangeCount, const VkImageSubresourceRange *pRanges, CMD_TYPE cmd_type) {
    auto cb_node = GetCBNode(dev_data, commandBuffer);
    auto image_state = GetImageState(dev_data, image);
    if (cb_node && image_state) {
        AddCommandBufferBindingImage(dev_data, cb_node, image_state);
        // Deferred to queue-submit time: a clear makes the image contents valid.
        std::function<bool()> function = [=]() {
            SetImageMemoryValid(dev_data, image_state, true);
            return false;
        };
        cb_node->validate_functions.push_back(function);
        core_validation::UpdateCmdBufferLastCmd(cb_node, cmd_type);
        for (uint32_t i = 0; i < rangeCount; ++i) {
            RecordClearImageLayout(dev_data, cb_node, image, pRanges[i], imageLayout);
        }
    }
}

// Validate vkCmdClearDepthStencilImage: bound memory, queue capabilities, command state, per-range
// aspect masks/layout, and that the image actually has a depth/stencil format.
bool PreCallValidateCmdClearDepthStencilImage(layer_data *device_data, VkCommandBuffer commandBuffer, VkImage image,
                                              VkImageLayout imageLayout, uint32_t rangeCount,
                                              const VkImageSubresourceRange *pRanges) {
    bool skip = false;
    const debug_report_data *report_data = core_validation::GetReportData(device_data);

    // TODO : Verify memory is in VK_IMAGE_STATE_CLEAR state
    auto cb_node = GetCBNode(device_data, commandBuffer);
    auto image_state = GetImageState(device_data, image);
    if (cb_node && image_state) {
        skip |= ValidateMemoryIsBoundToImage(device_data, image_state, "vkCmdClearDepthStencilImage()", VALIDATION_ERROR_18a00014);
        skip |= ValidateCmdQueueFlags(device_data, cb_node, "vkCmdClearDepthStencilImage()", VK_QUEUE_GRAPHICS_BIT,
                                      VALIDATION_ERROR_18a02415);
        skip |= ValidateCmd(device_data, cb_node, CMD_CLEARDEPTHSTENCILIMAGE, "vkCmdClearDepthStencilImage()");
        skip |= insideRenderPass(device_data, cb_node, "vkCmdClearDepthStencilImage()", VALIDATION_ERROR_18a00017);
        for (uint32_t i = 0; i < rangeCount; ++i) {
            std::string param_name = "pRanges[" + std::to_string(i) + "]";
            skip |= ValidateImageSubresourceRange(device_data, image_state, false, pRanges[i], "vkCmdClearDepthStencilImage",
                                                 param_name.c_str());
            skip |=
                VerifyClearImageLayout(device_data, cb_node, image_state, pRanges[i], imageLayout, "vkCmdClearDepthStencilImage()");
            // Image aspect must be depth or stencil or both
            if (((pRanges[i].aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) != VK_IMAGE_ASPECT_DEPTH_BIT) &&
                ((pRanges[i].aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) != VK_IMAGE_ASPECT_STENCIL_BIT)) {
                char const str[] =
                    "vkCmdClearDepthStencilImage aspectMasks for all subresource ranges must be "
                    "set to VK_IMAGE_ASPECT_DEPTH_BIT and/or VK_IMAGE_ASPECT_STENCIL_BIT";
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                                HandleToUint64(commandBuffer), __LINE__, DRAWSTATE_INVALID_IMAGE_ASPECT, "IMAGE", str);
            }
        }
        if (image_state && !FormatIsDepthOrStencil(image_state->createInfo.format)) {
            char const str[] = "vkCmdClearDepthStencilImage called without a depth/stencil image.";
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
                            HandleToUint64(image), __LINE__, VALIDATION_ERROR_18a0001c, "IMAGE", "%s. %s", str,
                            validation_error_map[VALIDATION_ERROR_18a0001c]);
        }
    }
    return skip;
}

// Returns true if [x, xoffset] and [y, yoffset] overlap
// NOTE(review): negative offsets are cast to uint32_t and will wrap; presumably callers only pass
// non-negative offsets here — confirm before relying on this for signed inputs.
static bool RangesIntersect(int32_t start, uint32_t start_offset, int32_t end, uint32_t end_offset) {
    bool result = false;
    uint32_t intersection_min = std::max(static_cast<uint32_t>(start), static_cast<uint32_t>(end));
    uint32_t intersection_max = std::min(static_cast<uint32_t>(start) + start_offset, static_cast<uint32_t>(end) + end_offset);

    if (intersection_max > intersection_min) {
        result = true;
    }
    return result;
}

// Returns true if two VkImageCopy structures overlap
static bool RegionIntersects(const VkImageCopy *src, const VkImageCopy *dst, VkImageType type) {
    bool result = false;
    if ((src->srcSubresource.mipLevel == dst->dstSubresource.mipLevel) &&
        (RangesIntersect(src->srcSubresource.baseArrayLayer, src->srcSubresource.layerCount, dst->dstSubresource.baseArrayLayer,
                         dst->dstSubresource.layerCount))) {
        result = true;
        // Each case ANDs in the overlap test for one axis; fallthrough accumulates all axes relevant to the image type.
        switch (type) {
            case VK_IMAGE_TYPE_3D:
                result &= RangesIntersect(src->srcOffset.z, src->extent.depth, dst->dstOffset.z, dst->extent.depth);
                // Intentionally fall through to 2D case
            case VK_IMAGE_TYPE_2D:
                result &= RangesIntersect(src->srcOffset.y, src->extent.height, dst->dstOffset.y, dst->extent.height);
                // Intentionally fall through to 1D case
            case VK_IMAGE_TYPE_1D:
                result &= RangesIntersect(src->srcOffset.x, src->extent.width, dst->dstOffset.x, dst->extent.width);
                break;
            default:
                // Unrecognized or new IMAGE_TYPE enums will be caught in parameter_validation
                assert(false);
        }
    }
    return result;
}

// Returns non-zero if offset and extent exceed image extents
static const uint32_t x_bit = 1;
static const uint32_t y_bit = 2;
static const uint32_t z_bit = 4;
static uint32_t ExceedsBounds(const VkOffset3D *offset, const VkExtent3D *extent, const VkExtent3D *image_extent) {
    uint32_t result = 0;
    // Extents/depths cannot be negative but checks left in for clarity
    if ((offset->z + extent->depth > image_extent->depth) || (offset->z < 0) ||
        ((offset->z + static_cast<int32_t>(extent->depth)) < 0)) {
        result |= z_bit;
    }
    if ((offset->y + extent->height > image_extent->height) || (offset->y < 0) ||
        ((offset->y + static_cast<int32_t>(extent->height)) < 0)) {
        result |= y_bit;
    }
    if ((offset->x + extent->width > image_extent->width) || (offset->x < 0) ||
        ((offset->x + static_cast<int32_t>(extent->width)) < 0)) {
        result |= x_bit;
    }
    return result;
}

// Test if two VkExtent3D structs are equivalent
static inline bool IsExtentEqual(const VkExtent3D *extent, const VkExtent3D *other_extent) {
    bool result = true;
    if ((extent->width != other_extent->width) || (extent->height != other_extent->height) ||
        (extent->depth != other_extent->depth)) {
        result = false;
    }
    return result;
}

// Returns the effective extent of an image subresource, adjusted for mip level and array depth.
static inline VkExtent3D GetImageSubresourceExtent(const IMAGE_STATE *img, const VkImageSubresourceLayers *subresource) {
    const uint32_t mip = subresource->mipLevel;

    // Return zero extent if mip level doesn't exist
    if (mip >= img->createInfo.mipLevels) {
        return VkExtent3D{0, 0, 0};
    }

    // Don't allow mip adjustment to create 0 dim, but pass along a 0 if that's what subresource specified
    VkExtent3D extent = img->createInfo.extent;
    extent.width = (0 == extent.width ? 0 : std::max(1U, extent.width >> mip));
    extent.height = (0 == extent.height ? 0 : std::max(1U, extent.height >> mip));
    extent.depth = (0 == extent.depth ? 0 : std::max(1U, extent.depth >> mip));

    // Image arrays have an effective z extent that isn't diminished by mip level
    if (VK_IMAGE_TYPE_3D != img->createInfo.imageType) {
        extent.depth = img->createInfo.arrayLayers;
    }

    return extent;
}

// Test if the extent argument has all dimensions set to 0.
static inline bool IsExtentAllZeroes(const VkExtent3D *extent) {
    return ((extent->width == 0) && (extent->height == 0) && (extent->depth == 0));
}

// Test if the extent argument has any dimensions set to 0.
static inline bool IsExtentSizeZero(const VkExtent3D *extent) {
    return ((extent->width == 0) || (extent->height == 0) || (extent->depth == 0));
}

// Returns the image transfer granularity for a specific image scaled by compressed block size if necessary.
static inline VkExtent3D GetScaledItg(layer_data *device_data, const GLOBAL_CB_NODE *cb_node, const IMAGE_STATE *img) {
    // Default to (0, 0, 0) granularity in case we can't find the real granularity for the physical device.
    VkExtent3D granularity = {0, 0, 0};
    auto pPool = GetCommandPoolNode(device_data, cb_node->createInfo.commandPool);
    if (pPool) {
        granularity =
            GetPhysDevProperties(device_data)->queue_family_properties[pPool->queueFamilyIndex].minImageTransferGranularity;
        if (FormatIsCompressed(img->createInfo.format)) {
            auto block_size = FormatCompressedTexelBlockExtent(img->createInfo.format);
            // Depth is intentionally not scaled; presumably compressed texel block depth is always 1 — confirm
            // if 3D compressed formats are ever supported here.
            granularity.width *= block_size.width;
            granularity.height *= block_size.height;
        }
    }
    return granularity;
}

// Test elements of a VkExtent3D structure against alignment constraints contained in another VkExtent3D structure
static inline bool IsExtentAligned(const VkExtent3D *extent, const VkExtent3D *granularity) {
    bool valid = true;
    // SafeModulo guards against a zero-divisor granularity component.
    if ((SafeModulo(extent->depth, granularity->depth) != 0) || (SafeModulo(extent->width, granularity->width) != 0) ||
        (SafeModulo(extent->height, granularity->height) != 0)) {
        valid = false;
    }
    return valid;
}

// Check elements of a VkOffset3D structure against a queue family's Image Transfer Granularity values
static inline bool CheckItgOffset(layer_data *device_data, const GLOBAL_CB_NODE *cb_node, const VkOffset3D *offset,
                                  const VkExtent3D *granularity, const uint32_t i, const char *function, const char *member) {
    const debug_report_data *report_data = core_validation::GetReportData(device_data);
    bool skip = false;
    // Work with magnitudes so the alignment test below is well-defined for negative offsets.
    VkExtent3D offset_extent = {};
    offset_extent.width = static_cast<uint32_t>(abs(offset->x));
    offset_extent.height = static_cast<uint32_t>(abs(offset->y));
    offset_extent.depth = static_cast<uint32_t>(abs(offset->z));
    if (IsExtentAllZeroes(granularity)) {
        // If the queue family image transfer granularity is (0, 0, 0), then the offset must always be (0, 0, 0)
        if (IsExtentAllZeroes(&offset_extent) == false) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_IMAGE_TRANSFER_GRANULARITY, "DS",
                            "%s: pRegion[%d].%s (x=%d, y=%d, z=%d) must be (x=0, y=0, z=0) "
                            "when the command buffer's queue family image transfer granularity is (w=0, h=0, d=0).",
                            function, i, member, offset->x, offset->y, offset->z);
        }
    } else {
        // If the queue family image transfer granularity is not (0, 0, 0), then the offset dimensions must always be even
        // integer multiples of the image transfer granularity.
        if (IsExtentAligned(&offset_extent, granularity) == false) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_IMAGE_TRANSFER_GRANULARITY, "DS",
                            "%s: pRegion[%d].%s (x=%d, y=%d, z=%d) dimensions must be even integer "
                            "multiples of this command buffer's queue family image transfer granularity (w=%d, h=%d, d=%d).",
                            function, i, member, offset->x, offset->y, offset->z, granularity->width, granularity->height,
                            granularity->depth);
        }
    }
    return skip;
}

// Check elements of a VkExtent3D structure against a queue family's Image Transfer Granularity values
static inline bool CheckItgExtent(layer_data *device_data, const GLOBAL_CB_NODE *cb_node, const VkExtent3D *extent,
                                  const VkOffset3D *offset, const VkExtent3D *granularity, const VkExtent3D *subresource_extent,
                                  const uint32_t i, const char *function, const char *member) {
    const debug_report_data *report_data = core_validation::GetReportData(device_data);
    bool skip = false;
    if (IsExtentAllZeroes(granularity)) {
        // If the queue family image transfer granularity is (0, 0, 0), then the extent must always match the image
        // subresource extent.
1253 if (IsExtentEqual(extent, subresource_extent) == false) { 1254 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 1255 HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_IMAGE_TRANSFER_GRANULARITY, "DS", 1256 "%s: pRegion[%d].%s (w=%d, h=%d, d=%d) must match the image subresource extents (w=%d, h=%d, d=%d) " 1257 "when the command buffer's queue family image transfer granularity is (w=0, h=0, d=0).", 1258 function, i, member, extent->width, extent->height, extent->depth, subresource_extent->width, 1259 subresource_extent->height, subresource_extent->depth); 1260 } 1261 } else { 1262 // If the queue family image transfer granularity is not (0, 0, 0), then the extent dimensions must always be even 1263 // integer multiples of the image transfer granularity or the offset + extent dimensions must always match the image 1264 // subresource extent dimensions. 1265 VkExtent3D offset_extent_sum = {}; 1266 offset_extent_sum.width = static_cast<uint32_t>(abs(offset->x)) + extent->width; 1267 offset_extent_sum.height = static_cast<uint32_t>(abs(offset->y)) + extent->height; 1268 offset_extent_sum.depth = static_cast<uint32_t>(abs(offset->z)) + extent->depth; 1269 1270 bool x_ok = 1271 ((0 == SafeModulo(extent->width, granularity->width)) || (subresource_extent->width == offset_extent_sum.width)); 1272 bool y_ok = 1273 ((0 == SafeModulo(extent->height, granularity->height)) || (subresource_extent->height == offset_extent_sum.height)); 1274 bool z_ok = 1275 ((0 == SafeModulo(extent->depth, granularity->depth)) || (subresource_extent->depth == offset_extent_sum.depth)); 1276 1277 if (!(x_ok && y_ok && z_ok)) { 1278 skip |= 1279 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 1280 HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_IMAGE_TRANSFER_GRANULARITY, "DS", 1281 "%s: pRegion[%d].%s (w=%d, h=%d, d=%d) dimensions must be even integer multiples 
of this command buffer's " 1282 "queue family image transfer granularity (w=%d, h=%d, d=%d) or offset (x=%d, y=%d, z=%d) + " 1283 "extent (w=%d, h=%d, d=%d) must match the image subresource extents (w=%d, h=%d, d=%d).", 1284 function, i, member, extent->width, extent->height, extent->depth, granularity->width, granularity->height, 1285 granularity->depth, offset->x, offset->y, offset->z, extent->width, extent->height, extent->depth, 1286 subresource_extent->width, subresource_extent->height, subresource_extent->depth); 1287 } 1288 } 1289 return skip; 1290} 1291 1292// Check a uint32_t width or stride value against a queue family's Image Transfer Granularity width value 1293static inline bool CheckItgInt(layer_data *device_data, const GLOBAL_CB_NODE *cb_node, const uint32_t value, 1294 const uint32_t granularity, const uint32_t i, const char *function, const char *member) { 1295 const debug_report_data *report_data = core_validation::GetReportData(device_data); 1296 1297 bool skip = false; 1298 if (SafeModulo(value, granularity) != 0) { 1299 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 1300 HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_IMAGE_TRANSFER_GRANULARITY, "DS", 1301 "%s: pRegion[%d].%s (%d) must be an even integer multiple of this command buffer's queue family image " 1302 "transfer granularity width (%d).", 1303 function, i, member, value, granularity); 1304 } 1305 return skip; 1306} 1307 1308// Check a VkDeviceSize value against a queue family's Image Transfer Granularity width value 1309static inline bool CheckItgSize(layer_data *device_data, const GLOBAL_CB_NODE *cb_node, const VkDeviceSize value, 1310 const uint32_t granularity, const uint32_t i, const char *function, const char *member) { 1311 const debug_report_data *report_data = core_validation::GetReportData(device_data); 1312 bool skip = false; 1313 if (SafeModulo(value, granularity) != 0) { 1314 skip |= 
            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                    HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_IMAGE_TRANSFER_GRANULARITY, "DS",
                    "%s: pRegion[%d].%s (%" PRIdLEAST64
                    ") must be an even integer multiple of this command buffer's queue family image transfer "
                    "granularity width (%d).",
                    function, i, member, value, granularity);
    }
    return skip;
}

// Check valid usage Image Transfer Granularity requirements for elements of a VkBufferImageCopy structure.
// Returns true if any validation error was logged for region i.
bool ValidateCopyBufferImageTransferGranularityRequirements(layer_data *device_data, const GLOBAL_CB_NODE *cb_node,
                                                            const IMAGE_STATE *img, const VkBufferImageCopy *region,
                                                            const uint32_t i, const char *function) {
    bool skip = false;
    if (FormatIsCompressed(img->createInfo.format) == true) {
        // TODO: Add granularity checking for compressed formats

        // bufferRowLength must be a multiple of the compressed texel block width
        // bufferImageHeight must be a multiple of the compressed texel block height
        // all members of imageOffset must be a multiple of the corresponding dimensions of the compressed texel block
        // bufferOffset must be a multiple of the compressed texel block size in bytes
        // imageExtent.width must be a multiple of the compressed texel block width or (imageExtent.width + imageOffset.x)
        // must equal the image subresource width
        // imageExtent.height must be a multiple of the compressed texel block height or (imageExtent.height + imageOffset.y)
        // must equal the image subresource height
        // imageExtent.depth must be a multiple of the compressed texel block depth or (imageExtent.depth + imageOffset.z)
        // must equal the image subresource depth
    } else {
        VkExtent3D granularity = GetScaledItg(device_data, cb_node, img);
        skip |= CheckItgSize(device_data, cb_node, region->bufferOffset, granularity.width, i, function, "bufferOffset");
        skip |= CheckItgInt(device_data, cb_node, region->bufferRowLength, granularity.width, i, function, "bufferRowLength");
        // NOTE(review): bufferImageHeight is validated against granularity.width here; granularity.height looks like the
        // intended divisor -- confirm against the spec's minImageTransferGranularity valid usage before changing.
        skip |= CheckItgInt(device_data, cb_node, region->bufferImageHeight, granularity.width, i, function, "bufferImageHeight");
        skip |= CheckItgOffset(device_data, cb_node, &region->imageOffset, &granularity, i, function, "imageOffset");
        VkExtent3D subresource_extent = GetImageSubresourceExtent(img, &region->imageSubresource);
        skip |= CheckItgExtent(device_data, cb_node, &region->imageExtent, &region->imageOffset, &granularity, &subresource_extent,
                               i, function, "imageExtent");
    }
    return skip;
}

// Check valid usage Image Transfer Granularity requirements for elements of a VkImageCopy structure.
// Checks both the source side (srcOffset/extent vs. src image granularity) and the destination side.
// Returns true if any validation error was logged for region i.
bool ValidateCopyImageTransferGranularityRequirements(layer_data *device_data, const GLOBAL_CB_NODE *cb_node,
                                                      const IMAGE_STATE *src_img, const IMAGE_STATE *dst_img,
                                                      const VkImageCopy *region, const uint32_t i, const char *function) {
    bool skip = false;
    // Source image side
    VkExtent3D granularity = GetScaledItg(device_data, cb_node, src_img);
    skip |= CheckItgOffset(device_data, cb_node, &region->srcOffset, &granularity, i, function, "srcOffset");
    VkExtent3D subresource_extent = GetImageSubresourceExtent(src_img, &region->srcSubresource);
    skip |= CheckItgExtent(device_data, cb_node, &region->extent, &region->srcOffset, &granularity, &subresource_extent, i,
                           function, "extent");

    // Destination image side
    granularity = GetScaledItg(device_data, cb_node, dst_img);
    skip |= CheckItgOffset(device_data, cb_node, &region->dstOffset, &granularity, i, function, "dstOffset");
    subresource_extent = GetImageSubresourceExtent(dst_img, &region->dstSubresource);
    skip |= CheckItgExtent(device_data, cb_node, &region->extent, &region->dstOffset, &granularity, &subresource_extent, i,
                           function, "extent");
    return skip;
}

// Validate contents of a VkImageCopy struct
bool ValidateImageCopyData(const layer_data
*device_data, const debug_report_data *report_data, const uint32_t regionCount, 1376 const VkImageCopy *ic_regions, const IMAGE_STATE *src_state, const IMAGE_STATE *dst_state) { 1377 bool skip = false; 1378 1379 for (uint32_t i = 0; i < regionCount; i++) { 1380 VkImageCopy image_copy = ic_regions[i]; 1381 bool slice_override = false; 1382 uint32_t depth_slices = 0; 1383 1384 // Special case for copying between a 1D/2D array and a 3D image 1385 // TBD: This seems like the only way to reconcile 3 mutually-exclusive VU checks for 2D/3D copies. Heads up. 1386 if ((VK_IMAGE_TYPE_3D == src_state->createInfo.imageType) && (VK_IMAGE_TYPE_3D != dst_state->createInfo.imageType)) { 1387 depth_slices = image_copy.dstSubresource.layerCount; // Slice count from 2D subresource 1388 slice_override = (depth_slices != 1); 1389 } else if ((VK_IMAGE_TYPE_3D == dst_state->createInfo.imageType) && (VK_IMAGE_TYPE_3D != src_state->createInfo.imageType)) { 1390 depth_slices = image_copy.srcSubresource.layerCount; // Slice count from 2D subresource 1391 slice_override = (depth_slices != 1); 1392 } 1393 1394 // Do all checks on source image 1395 // 1396 if (src_state->createInfo.imageType == VK_IMAGE_TYPE_1D) { 1397 if ((0 != image_copy.srcOffset.y) || (1 != image_copy.extent.height)) { 1398 skip |= 1399 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 1400 reinterpret_cast<const uint64_t &>(src_state->image), __LINE__, VALIDATION_ERROR_09c00124, "IMAGE", 1401 "vkCmdCopyImage(): pRegion[%d] srcOffset.y is %d and extent.height is %d. For 1D images these must " 1402 "be 0 and 1, respectively. 
%s", 1403 i, image_copy.srcOffset.y, image_copy.extent.height, validation_error_map[VALIDATION_ERROR_09c00124]); 1404 } 1405 } 1406 1407 if ((src_state->createInfo.imageType == VK_IMAGE_TYPE_1D) || (src_state->createInfo.imageType == VK_IMAGE_TYPE_2D)) { 1408 if ((0 != image_copy.srcOffset.z) || (1 != image_copy.extent.depth)) { 1409 skip |= 1410 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 1411 reinterpret_cast<const uint64_t &>(src_state->image), __LINE__, VALIDATION_ERROR_09c00128, "IMAGE", 1412 "vkCmdCopyImage(): pRegion[%d] srcOffset.z is %d and extent.depth is %d. For 1D and 2D images " 1413 "these must be 0 and 1, respectively. %s", 1414 i, image_copy.srcOffset.z, image_copy.extent.depth, validation_error_map[VALIDATION_ERROR_09c00128]); 1415 } 1416 } 1417 1418 // VU01199 changed with mnt1 1419 if (GetDeviceExtensions(device_data)->vk_khr_maintenance1) { 1420 if (src_state->createInfo.imageType == VK_IMAGE_TYPE_3D) { 1421 if ((0 != image_copy.srcSubresource.baseArrayLayer) || (1 != image_copy.srcSubresource.layerCount)) { 1422 skip |= 1423 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 1424 reinterpret_cast<const uint64_t &>(src_state->image), __LINE__, VALIDATION_ERROR_09c0011a, "IMAGE", 1425 "vkCmdCopyImage(): pRegion[%d] srcSubresource.baseArrayLayer is %d and srcSubresource.layerCount " 1426 "is %d. For VK_IMAGE_TYPE_3D images these must be 0 and 1, respectively. 
%s", 1427 i, image_copy.srcSubresource.baseArrayLayer, image_copy.srcSubresource.layerCount, 1428 validation_error_map[VALIDATION_ERROR_09c0011a]); 1429 } 1430 } 1431 } else { // Pre maint 1 1432 if (src_state->createInfo.imageType == VK_IMAGE_TYPE_3D || dst_state->createInfo.imageType == VK_IMAGE_TYPE_3D) { 1433 if ((0 != image_copy.srcSubresource.baseArrayLayer) || (1 != image_copy.srcSubresource.layerCount)) { 1434 skip |= 1435 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 1436 reinterpret_cast<const uint64_t &>(src_state->image), __LINE__, VALIDATION_ERROR_09c0011a, "IMAGE", 1437 "vkCmdCopyImage(): pRegion[%d] srcSubresource.baseArrayLayer is %d and " 1438 "srcSubresource.layerCount is %d. For copies with either source or dest of type " 1439 "VK_IMAGE_TYPE_3D, these must be 0 and 1, respectively. %s", 1440 i, image_copy.srcSubresource.baseArrayLayer, image_copy.srcSubresource.layerCount, 1441 validation_error_map[VALIDATION_ERROR_09c0011a]); 1442 } 1443 } 1444 } 1445 1446 // TODO: this VU is redundant with VU01224. Gitlab issue 812 submitted to get it removed from the spec. 1447 if ((image_copy.srcSubresource.baseArrayLayer >= src_state->createInfo.arrayLayers) || 1448 (image_copy.srcSubresource.baseArrayLayer + image_copy.srcSubresource.layerCount > src_state->createInfo.arrayLayers)) { 1449 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 1450 reinterpret_cast<const uint64_t &>(src_state->image), __LINE__, VALIDATION_ERROR_09c0012a, "IMAGE", 1451 "vkCmdCopyImage(): pRegion[%d] srcSubresource.baseArrayLayer (%d) must be less than the source image's " 1452 "arrayLayers (%d), and the sum of baseArrayLayer and srcSubresource.layerCount (%d) must be less than " 1453 "or equal to the source image's arrayLayers. 
%s", 1454 i, image_copy.srcSubresource.baseArrayLayer, src_state->createInfo.arrayLayers, 1455 image_copy.srcSubresource.layerCount, validation_error_map[VALIDATION_ERROR_09c0012a]); 1456 } 1457 1458 // Checks that apply only to compressed images 1459 if (FormatIsCompressed(src_state->createInfo.format)) { 1460 VkExtent3D block_size = FormatCompressedTexelBlockExtent(src_state->createInfo.format); 1461 1462 // image offsets must be multiples of block dimensions 1463 if ((SafeModulo(image_copy.srcOffset.x, block_size.width) != 0) || 1464 (SafeModulo(image_copy.srcOffset.y, block_size.height) != 0) || 1465 (SafeModulo(image_copy.srcOffset.z, block_size.depth) != 0)) { 1466 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 1467 reinterpret_cast<const uint64_t &>(src_state->image), __LINE__, VALIDATION_ERROR_09c0013a, "IMAGE", 1468 "vkCmdCopyImage(): pRegion[%d] srcOffset (%d, %d) must be multiples of the compressed image's " 1469 "texel width & height (%d, %d). %s.", 1470 i, image_copy.srcOffset.x, image_copy.srcOffset.y, block_size.width, block_size.height, 1471 validation_error_map[VALIDATION_ERROR_09c0013a]); 1472 } 1473 1474 // extent width must be a multiple of block width, or extent+offset width must equal subresource width 1475 VkExtent3D mip_extent = GetImageSubresourceExtent(src_state, &(image_copy.srcSubresource)); 1476 if ((SafeModulo(image_copy.extent.width, block_size.width) != 0) && 1477 (image_copy.extent.width + image_copy.srcOffset.x != mip_extent.width)) { 1478 skip |= 1479 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 1480 reinterpret_cast<const uint64_t &>(src_state->image), __LINE__, VALIDATION_ERROR_09c0013c, "IMAGE", 1481 "vkCmdCopyImage(): pRegion[%d] extent width (%d) must be a multiple of the compressed texture block " 1482 "width (%d), or when added to srcOffset.x (%d) must equal the image subresource width (%d). 
%s.", 1483 i, image_copy.extent.width, block_size.width, image_copy.srcOffset.x, mip_extent.width, 1484 validation_error_map[VALIDATION_ERROR_09c0013c]); 1485 } 1486 1487 // extent height must be a multiple of block height, or extent+offset height must equal subresource height 1488 if ((SafeModulo(image_copy.extent.height, block_size.height) != 0) && 1489 (image_copy.extent.height + image_copy.srcOffset.y != mip_extent.height)) { 1490 skip |= 1491 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 1492 reinterpret_cast<const uint64_t &>(src_state->image), __LINE__, VALIDATION_ERROR_09c0013e, "IMAGE", 1493 "vkCmdCopyImage(): pRegion[%d] extent height (%d) must be a multiple of the compressed texture block " 1494 "height (%d), or when added to srcOffset.y (%d) must equal the image subresource height (%d). %s.", 1495 i, image_copy.extent.height, block_size.height, image_copy.srcOffset.y, mip_extent.height, 1496 validation_error_map[VALIDATION_ERROR_09c0013e]); 1497 } 1498 1499 // extent depth must be a multiple of block depth, or extent+offset depth must equal subresource depth 1500 uint32_t copy_depth = (slice_override ? depth_slices : image_copy.extent.depth); 1501 if ((SafeModulo(copy_depth, block_size.depth) != 0) && (copy_depth + image_copy.srcOffset.z != mip_extent.depth)) { 1502 skip |= 1503 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 1504 reinterpret_cast<const uint64_t &>(src_state->image), __LINE__, VALIDATION_ERROR_09c00140, "IMAGE", 1505 "vkCmdCopyImage(): pRegion[%d] extent width (%d) must be a multiple of the compressed texture block " 1506 "depth (%d), or when added to srcOffset.z (%d) must equal the image subresource depth (%d). 
%s.", 1507 i, image_copy.extent.depth, block_size.depth, image_copy.srcOffset.z, mip_extent.depth, 1508 validation_error_map[VALIDATION_ERROR_09c00140]); 1509 } 1510 } // Compressed 1511 1512 // Do all checks on dest image 1513 // 1514 if (dst_state->createInfo.imageType == VK_IMAGE_TYPE_1D) { 1515 if ((0 != image_copy.dstOffset.y) || (1 != image_copy.extent.height)) { 1516 skip |= 1517 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 1518 reinterpret_cast<const uint64_t &>(dst_state->image), __LINE__, VALIDATION_ERROR_09c00130, "IMAGE", 1519 "vkCmdCopyImage(): pRegion[%d] dstOffset.y is %d and extent.height is %d. For 1D images these must " 1520 "be 0 and 1, respectively. %s", 1521 i, image_copy.dstOffset.y, image_copy.extent.height, validation_error_map[VALIDATION_ERROR_09c00130]); 1522 } 1523 } 1524 1525 if ((dst_state->createInfo.imageType == VK_IMAGE_TYPE_1D) || (dst_state->createInfo.imageType == VK_IMAGE_TYPE_2D)) { 1526 if ((0 != image_copy.dstOffset.z) || (1 != image_copy.extent.depth)) { 1527 skip |= 1528 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 1529 reinterpret_cast<const uint64_t &>(dst_state->image), __LINE__, VALIDATION_ERROR_09c00134, "IMAGE", 1530 "vkCmdCopyImage(): pRegion[%d] dstOffset.z is %d and extent.depth is %d. For 1D and 2D images " 1531 "these must be 0 and 1, respectively. 
%s", 1532 i, image_copy.dstOffset.z, image_copy.extent.depth, validation_error_map[VALIDATION_ERROR_09c00134]); 1533 } 1534 } 1535 1536 if (dst_state->createInfo.imageType == VK_IMAGE_TYPE_3D) { 1537 if ((0 != image_copy.dstSubresource.baseArrayLayer) || (1 != image_copy.dstSubresource.layerCount)) { 1538 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 1539 reinterpret_cast<const uint64_t &>(dst_state->image), __LINE__, VALIDATION_ERROR_09c0011a, "IMAGE", 1540 "vkCmdCopyImage(): pRegion[%d] dstSubresource.baseArrayLayer is %d and dstSubresource.layerCount " 1541 "is %d. For VK_IMAGE_TYPE_3D images these must be 0 and 1, respectively. %s", 1542 i, image_copy.dstSubresource.baseArrayLayer, image_copy.dstSubresource.layerCount, 1543 validation_error_map[VALIDATION_ERROR_09c0011a]); 1544 } 1545 } 1546 // VU01199 changed with mnt1 1547 if (GetDeviceExtensions(device_data)->vk_khr_maintenance1) { 1548 if (dst_state->createInfo.imageType == VK_IMAGE_TYPE_3D) { 1549 if ((0 != image_copy.dstSubresource.baseArrayLayer) || (1 != image_copy.dstSubresource.layerCount)) { 1550 skip |= 1551 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 1552 reinterpret_cast<const uint64_t &>(dst_state->image), __LINE__, VALIDATION_ERROR_09c0011a, "IMAGE", 1553 "vkCmdCopyImage(): pRegion[%d] dstSubresource.baseArrayLayer is %d and dstSubresource.layerCount " 1554 "is %d. For VK_IMAGE_TYPE_3D images these must be 0 and 1, respectively. 
%s", 1555 i, image_copy.dstSubresource.baseArrayLayer, image_copy.dstSubresource.layerCount, 1556 validation_error_map[VALIDATION_ERROR_09c0011a]); 1557 } 1558 } 1559 } else { // Pre maint 1 1560 if (src_state->createInfo.imageType == VK_IMAGE_TYPE_3D || dst_state->createInfo.imageType == VK_IMAGE_TYPE_3D) { 1561 if ((0 != image_copy.dstSubresource.baseArrayLayer) || (1 != image_copy.dstSubresource.layerCount)) { 1562 skip |= 1563 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 1564 reinterpret_cast<const uint64_t &>(dst_state->image), __LINE__, VALIDATION_ERROR_09c0011a, "IMAGE", 1565 "vkCmdCopyImage(): pRegion[%d] dstSubresource.baseArrayLayer is %d and " 1566 "dstSubresource.layerCount is %d. For copies with either source or dest of type " 1567 "VK_IMAGE_TYPE_3D, these must be 0 and 1, respectively. %s", 1568 i, image_copy.dstSubresource.baseArrayLayer, image_copy.dstSubresource.layerCount, 1569 validation_error_map[VALIDATION_ERROR_09c0011a]); 1570 } 1571 } 1572 } 1573 1574 // TODO: this VU is redundant with VU01224. Gitlab issue 812 submitted to get it removed from the spec. 1575 if ((image_copy.dstSubresource.baseArrayLayer >= dst_state->createInfo.arrayLayers) || 1576 (image_copy.dstSubresource.baseArrayLayer + image_copy.dstSubresource.layerCount > dst_state->createInfo.arrayLayers)) { 1577 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 1578 reinterpret_cast<const uint64_t &>(dst_state->image), __LINE__, VALIDATION_ERROR_09c00136, "IMAGE", 1579 "vkCmdCopyImage(): pRegion[%d] dstSubresource.baseArrayLayer (%d) must be less than the dest image's " 1580 "arrayLayers (%d), and the sum of baseArrayLayer and dstSubresource.layerCount (%d) must be less than " 1581 "or equal to the dest image's arrayLayers. 
%s", 1582 i, image_copy.dstSubresource.baseArrayLayer, dst_state->createInfo.arrayLayers, 1583 image_copy.dstSubresource.layerCount, validation_error_map[VALIDATION_ERROR_09c00136]); 1584 } 1585 1586 // Checks that apply only to compressed images 1587 if (FormatIsCompressed(dst_state->createInfo.format)) { 1588 VkExtent3D block_size = FormatCompressedTexelBlockExtent(dst_state->createInfo.format); 1589 1590 // image offsets must be multiples of block dimensions 1591 if ((SafeModulo(image_copy.dstOffset.x, block_size.width) != 0) || 1592 (SafeModulo(image_copy.dstOffset.y, block_size.height) != 0) || 1593 (SafeModulo(image_copy.dstOffset.z, block_size.depth) != 0)) { 1594 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 1595 reinterpret_cast<const uint64_t &>(dst_state->image), __LINE__, VALIDATION_ERROR_09c00144, "IMAGE", 1596 "vkCmdCopyImage(): pRegion[%d] dstOffset (%d, %d) must be multiples of the compressed image's " 1597 "texel width & height (%d, %d). %s.", 1598 i, image_copy.dstOffset.x, image_copy.dstOffset.y, block_size.width, block_size.height, 1599 validation_error_map[VALIDATION_ERROR_09c00144]); 1600 } 1601 1602 // extent width must be a multiple of block width, or extent+offset width must equal subresource width 1603 VkExtent3D mip_extent = GetImageSubresourceExtent(dst_state, &(image_copy.dstSubresource)); 1604 if ((SafeModulo(image_copy.extent.width, block_size.width) != 0) && 1605 (image_copy.extent.width + image_copy.dstOffset.x != mip_extent.width)) { 1606 skip |= 1607 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 1608 reinterpret_cast<const uint64_t &>(dst_state->image), __LINE__, VALIDATION_ERROR_09c00146, "IMAGE", 1609 "vkCmdCopyImage(): pRegion[%d] extent width (%d) must be a multiple of the compressed texture block " 1610 "width (%d), or when added to dstOffset.x (%d) must equal the image subresource width (%d). 
%s.", 1611 i, image_copy.extent.width, block_size.width, image_copy.dstOffset.x, mip_extent.width, 1612 validation_error_map[VALIDATION_ERROR_09c00146]); 1613 } 1614 1615 // extent height must be a multiple of block height, or extent+offset height must equal subresource height 1616 if ((SafeModulo(image_copy.extent.height, block_size.height) != 0) && 1617 (image_copy.extent.height + image_copy.dstOffset.y != mip_extent.height)) { 1618 skip |= 1619 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 1620 reinterpret_cast<const uint64_t &>(dst_state->image), __LINE__, VALIDATION_ERROR_09c00148, "IMAGE", 1621 "vkCmdCopyImage(): pRegion[%d] extent height (%d) must be a multiple of the compressed texture block " 1622 "height (%d), or when added to dstOffset.y (%d) must equal the image subresource height (%d). %s.", 1623 i, image_copy.extent.height, block_size.height, image_copy.dstOffset.y, mip_extent.height, 1624 validation_error_map[VALIDATION_ERROR_09c00148]); 1625 } 1626 1627 // extent depth must be a multiple of block depth, or extent+offset depth must equal subresource depth 1628 uint32_t copy_depth = (slice_override ? depth_slices : image_copy.extent.depth); 1629 if ((SafeModulo(copy_depth, block_size.depth) != 0) && (copy_depth + image_copy.dstOffset.z != mip_extent.depth)) { 1630 skip |= 1631 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 1632 reinterpret_cast<const uint64_t &>(dst_state->image), __LINE__, VALIDATION_ERROR_09c0014a, "IMAGE", 1633 "vkCmdCopyImage(): pRegion[%d] extent width (%d) must be a multiple of the compressed texture block " 1634 "depth (%d), or when added to dstOffset.z (%d) must equal the image subresource depth (%d). 
%s.", 1635 i, image_copy.extent.depth, block_size.depth, image_copy.dstOffset.z, mip_extent.depth, 1636 validation_error_map[VALIDATION_ERROR_09c0014a]); 1637 } 1638 } // Compressed 1639 } 1640 return skip; 1641} 1642 1643bool PreCallValidateCmdCopyImage(layer_data *device_data, GLOBAL_CB_NODE *cb_node, IMAGE_STATE *src_image_state, 1644 IMAGE_STATE *dst_image_state, uint32_t region_count, const VkImageCopy *regions, 1645 VkImageLayout src_image_layout, VkImageLayout dst_image_layout) { 1646 bool skip = false; 1647 const debug_report_data *report_data = core_validation::GetReportData(device_data); 1648 skip = ValidateImageCopyData(device_data, report_data, region_count, regions, src_image_state, dst_image_state); 1649 1650 VkCommandBuffer command_buffer = cb_node->commandBuffer; 1651 1652 for (uint32_t i = 0; i < region_count; i++) { 1653 bool slice_override = false; 1654 uint32_t depth_slices = 0; 1655 1656 // Special case for copying between a 1D/2D array and a 3D image 1657 // TBD: This seems like the only way to reconcile 3 mutually-exclusive VU checks for 2D/3D copies. Heads up. 
1658 if ((VK_IMAGE_TYPE_3D == src_image_state->createInfo.imageType) && 1659 (VK_IMAGE_TYPE_3D != dst_image_state->createInfo.imageType)) { 1660 depth_slices = regions[i].dstSubresource.layerCount; // Slice count from 2D subresource 1661 slice_override = (depth_slices != 1); 1662 } else if ((VK_IMAGE_TYPE_3D == dst_image_state->createInfo.imageType) && 1663 (VK_IMAGE_TYPE_3D != src_image_state->createInfo.imageType)) { 1664 depth_slices = regions[i].srcSubresource.layerCount; // Slice count from 2D subresource 1665 slice_override = (depth_slices != 1); 1666 } 1667 1668 if (regions[i].srcSubresource.layerCount == 0) { 1669 std::stringstream ss; 1670 ss << "vkCmdCopyImage: number of layers in pRegions[" << i << "] srcSubresource is zero"; 1671 skip |= 1672 log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 1673 HandleToUint64(command_buffer), __LINE__, DRAWSTATE_INVALID_IMAGE_ASPECT, "IMAGE", "%s", ss.str().c_str()); 1674 } 1675 1676 if (regions[i].dstSubresource.layerCount == 0) { 1677 std::stringstream ss; 1678 ss << "vkCmdCopyImage: number of layers in pRegions[" << i << "] dstSubresource is zero"; 1679 skip |= 1680 log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 1681 HandleToUint64(command_buffer), __LINE__, DRAWSTATE_INVALID_IMAGE_ASPECT, "IMAGE", "%s", ss.str().c_str()); 1682 } 1683 1684 if (GetDeviceExtensions(device_data)->vk_khr_maintenance1) { 1685 // No chance of mismatch if we're overriding depth slice count 1686 if (!slice_override) { 1687 // The number of depth slices in srcSubresource and dstSubresource must match 1688 // Depth comes from layerCount for 1D,2D resources, from extent.depth for 3D 1689 uint32_t src_slices = 1690 (VK_IMAGE_TYPE_3D == src_image_state->createInfo.imageType ? regions[i].extent.depth 1691 : regions[i].srcSubresource.layerCount); 1692 uint32_t dst_slices = 1693 (VK_IMAGE_TYPE_3D == dst_image_state->createInfo.imageType ? 
regions[i].extent.depth 1694 : regions[i].dstSubresource.layerCount); 1695 if (src_slices != dst_slices) { 1696 std::stringstream ss; 1697 ss << "vkCmdCopyImage: number of depth slices in source and destination subresources for pRegions[" << i 1698 << "] do not match"; 1699 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 1700 reinterpret_cast<uint64_t &>(command_buffer), __LINE__, VALIDATION_ERROR_09c00118, "IMAGE", 1701 "%s. %s", ss.str().c_str(), validation_error_map[VALIDATION_ERROR_09c00118]); 1702 } 1703 } 1704 } else { 1705 // For each region the layerCount member of srcSubresource and dstSubresource must match 1706 if (regions[i].srcSubresource.layerCount != regions[i].dstSubresource.layerCount) { 1707 std::stringstream ss; 1708 ss << "vkCmdCopyImage: number of layers in source and destination subresources for pRegions[" << i 1709 << "] do not match"; 1710 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 1711 HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_09c00118, "IMAGE", "%s. %s", 1712 ss.str().c_str(), validation_error_map[VALIDATION_ERROR_09c00118]); 1713 } 1714 } 1715 1716 // For each region, the aspectMask member of srcSubresource and dstSubresource must match 1717 if (regions[i].srcSubresource.aspectMask != regions[i].dstSubresource.aspectMask) { 1718 char const str[] = "vkCmdCopyImage: Src and dest aspectMasks for each region must match"; 1719 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 1720 HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_09c00112, "IMAGE", "%s. 
// NOTE(review): this excerpt begins mid-way through PreCallValidateCmdCopyImage().  The
// signature and the locals referenced below (skip, i, regions, region_count, command_buffer,
// report_data, src/dst_image_state, x_bit/y_bit/z_bit, slice_override, depth_slices) are
// declared above this chunk — confirm against the full file.  All checks below run inside
// the per-region validation loop of vkCmdCopyImage().
                            %s", str,
                            validation_error_map[VALIDATION_ERROR_09c00112]);
        }

        // For each region, the aspectMask member of srcSubresource must be present in the source image
        if (!VerifyAspectsPresent(regions[i].srcSubresource.aspectMask, src_image_state->createInfo.format)) {
            std::stringstream ss;
            ss << "vkCmdCopyImage: pRegion[" << i
               << "] srcSubresource.aspectMask cannot specify aspects not present in source image";
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_09c0011c, "IMAGE", "%s. %s",
                            ss.str().c_str(), validation_error_map[VALIDATION_ERROR_09c0011c]);
        }

        // For each region, the aspectMask member of dstSubresource must be present in the destination image
        if (!VerifyAspectsPresent(regions[i].dstSubresource.aspectMask, dst_image_state->createInfo.format)) {
            std::stringstream ss;
            ss << "vkCmdCopyImage: pRegion[" << i << "] dstSubresource.aspectMask cannot specify aspects not present in dest image";
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_09c0011e, "IMAGE", "%s. %s",
                            ss.str().c_str(), validation_error_map[VALIDATION_ERROR_09c0011e]);
        }

        // AspectMask must not contain VK_IMAGE_ASPECT_METADATA_BIT
        if ((regions[i].srcSubresource.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) ||
            (regions[i].dstSubresource.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT)) {
            std::stringstream ss;
            ss << "vkCmdCopyImage: pRegions[" << i << "] may not specify aspectMask containing VK_IMAGE_ASPECT_METADATA_BIT";
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_0a600150, "IMAGE", "%s. %s",
                            ss.str().c_str(), validation_error_map[VALIDATION_ERROR_0a600150]);
        }

        // For each region, if aspectMask contains VK_IMAGE_ASPECT_COLOR_BIT, it must not contain either of
        // VK_IMAGE_ASPECT_DEPTH_BIT or VK_IMAGE_ASPECT_STENCIL_BIT
        if ((regions[i].srcSubresource.aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) &&
            (regions[i].srcSubresource.aspectMask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT))) {
            char const str[] = "vkCmdCopyImage aspectMask cannot specify both COLOR and DEPTH/STENCIL aspects";
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_0a60014e, "IMAGE", "%s. %s", str,
                            validation_error_map[VALIDATION_ERROR_0a60014e]);
        }

        // MipLevel must be less than the mipLevels specified in VkImageCreateInfo when the image was created
        if (regions[i].srcSubresource.mipLevel >= src_image_state->createInfo.mipLevels) {
            std::stringstream ss;
            ss << "vkCmdCopyImage: pRegions[" << i
               << "] specifies a src mipLevel greater than the number specified when the srcImage was created.";
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_0a600152, "IMAGE", "%s. %s",
                            ss.str().c_str(), validation_error_map[VALIDATION_ERROR_0a600152]);
        }
        // NOTE(review): the dst mipLevel check reuses the same VU id (0a600152) as the src check — verify
        // against the validation-error database that this id covers both subresources.
        if (regions[i].dstSubresource.mipLevel >= dst_image_state->createInfo.mipLevels) {
            std::stringstream ss;
            ss << "vkCmdCopyImage: pRegions[" << i
               << "] specifies a dst mipLevel greater than the number specified when the dstImage was created.";
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_0a600152, "IMAGE", "%s. %s",
                            ss.str().c_str(), validation_error_map[VALIDATION_ERROR_0a600152]);
        }

        // (baseArrayLayer + layerCount) must be less than or equal to the arrayLayers specified in VkImageCreateInfo when the
        // image was created
        if ((regions[i].srcSubresource.baseArrayLayer + regions[i].srcSubresource.layerCount) >
            src_image_state->createInfo.arrayLayers) {
            std::stringstream ss;
            ss << "vkCmdCopyImage: srcImage arrayLayers was " << src_image_state->createInfo.arrayLayers << " but subRegion[" << i
               << "] baseArrayLayer + layerCount is "
               << (regions[i].srcSubresource.baseArrayLayer + regions[i].srcSubresource.layerCount);
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_0a600154, "IMAGE", "%s. %s",
                            ss.str().c_str(), validation_error_map[VALIDATION_ERROR_0a600154]);
        }
        if ((regions[i].dstSubresource.baseArrayLayer + regions[i].dstSubresource.layerCount) >
            dst_image_state->createInfo.arrayLayers) {
            std::stringstream ss;
            ss << "vkCmdCopyImage: dstImage arrayLayers was " << dst_image_state->createInfo.arrayLayers << " but subRegion[" << i
               << "] baseArrayLayer + layerCount is "
               << (regions[i].dstSubresource.baseArrayLayer + regions[i].dstSubresource.layerCount);
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_0a600154, "IMAGE", "%s. %s",
                            ss.str().c_str(), validation_error_map[VALIDATION_ERROR_0a600154]);
        }

        // Check region extents for 1D-1D, 2D-2D, and 3D-3D copies
        if (src_image_state->createInfo.imageType == dst_image_state->createInfo.imageType) {
            // The source region specified by a given element of regions must be a region that is contained within srcImage
            VkExtent3D img_extent = GetImageSubresourceExtent(src_image_state, &(regions[i].srcSubresource));
            if (0 != ExceedsBounds(&regions[i].srcOffset, &regions[i].extent, &img_extent)) {
                std::stringstream ss;
                ss << "vkCmdCopyImage: Source pRegion[" << i << "] with mipLevel [ " << regions[i].srcSubresource.mipLevel
                   << " ], offset [ " << regions[i].srcOffset.x << ", " << regions[i].srcOffset.y << ", " << regions[i].srcOffset.z
                   << " ], extent [ " << regions[i].extent.width << ", " << regions[i].extent.height << ", "
                   << regions[i].extent.depth << " ] exceeds the source image dimensions";
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                                HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_190000f4, "IMAGE", "%s. %s",
                                ss.str().c_str(), validation_error_map[VALIDATION_ERROR_190000f4]);
            }

            // The destination region specified by a given element of regions must be a region that is contained within dst_image
            img_extent = GetImageSubresourceExtent(dst_image_state, &(regions[i].dstSubresource));
            if (0 != ExceedsBounds(&regions[i].dstOffset, &regions[i].extent, &img_extent)) {
                std::stringstream ss;
                ss << "vkCmdCopyImage: Dest pRegion[" << i << "] with mipLevel [ " << regions[i].dstSubresource.mipLevel
                   << " ], offset [ " << regions[i].dstOffset.x << ", " << regions[i].dstOffset.y << ", " << regions[i].dstOffset.z
                   << " ], extent [ " << regions[i].extent.width << ", " << regions[i].extent.height << ", "
                   << regions[i].extent.depth << " ] exceeds the destination image dimensions";
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                                HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_190000f6, "IMAGE", "%s. %s",
                                ss.str().c_str(), validation_error_map[VALIDATION_ERROR_190000f6]);
            }
        }

        // Each dimension offset + extent limits must fall with image subresource extent
        VkExtent3D subresource_extent = GetImageSubresourceExtent(src_image_state, &(regions[i].srcSubresource));
        VkExtent3D copy_extent = regions[i].extent;
        // slice_override substitutes a caller-computed depth (depth_slices) for 2D<->3D copies — see the
        // function head above this excerpt for how it is derived.
        if (slice_override) copy_extent.depth = depth_slices;
        uint32_t extent_check = ExceedsBounds(&(regions[i].srcOffset), &copy_extent, &subresource_extent);
        if (extent_check & x_bit) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_09c00120, "IMAGE",
                            "vkCmdCopyImage: Source image pRegion %1d x-dimension offset [%1d] + extent [%1d] exceeds subResource "
                            "width [%1d]. %s",
                            i, regions[i].srcOffset.x, regions[i].extent.width, subresource_extent.width,
                            validation_error_map[VALIDATION_ERROR_09c00120]);
        }

        if (extent_check & y_bit) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_09c00122, "IMAGE",
                            "vkCmdCopyImage: Source image pRegion %1d y-dimension offset [%1d] + extent [%1d] exceeds subResource "
                            "height [%1d]. %s",
                            i, regions[i].srcOffset.y, regions[i].extent.height, subresource_extent.height,
                            validation_error_map[VALIDATION_ERROR_09c00122]);
        }
        if (extent_check & z_bit) {
            // z reports the slice-adjusted copy_extent.depth (x/y report the raw region extent)
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_09c00126, "IMAGE",
                            "vkCmdCopyImage: Source image pRegion %1d z-dimension offset [%1d] + extent [%1d] exceeds subResource "
                            "depth [%1d]. %s",
                            i, regions[i].srcOffset.z, copy_extent.depth, subresource_extent.depth,
                            validation_error_map[VALIDATION_ERROR_09c00126]);
        }

        // Repeat the per-dimension bounds checks against the destination subresource
        subresource_extent = GetImageSubresourceExtent(dst_image_state, &(regions[i].dstSubresource));
        copy_extent = regions[i].extent;
        if (slice_override) copy_extent.depth = depth_slices;
        extent_check = ExceedsBounds(&(regions[i].dstOffset), &copy_extent, &subresource_extent);
        if (extent_check & x_bit) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_09c0012c, "IMAGE",
                            "vkCmdCopyImage: Dest image pRegion %1d x-dimension offset [%1d] + extent [%1d] exceeds subResource "
                            "width [%1d]. %s",
                            i, regions[i].dstOffset.x, regions[i].extent.width, subresource_extent.width,
                            validation_error_map[VALIDATION_ERROR_09c0012c]);
        }
        if (extent_check & y_bit) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_09c0012e, "IMAGE",
                            "vkCmdCopyImage: Dest image pRegion %1d y-dimension offset [%1d] + extent [%1d] exceeds subResource "
                            "height [%1d]. %s",
                            i, regions[i].dstOffset.y, regions[i].extent.height, subresource_extent.height,
                            validation_error_map[VALIDATION_ERROR_09c0012e]);
        }
        if (extent_check & z_bit) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_09c00132, "IMAGE",
                            "vkCmdCopyImage: Dest image pRegion %1d z-dimension offset [%1d] + extent [%1d] exceeds subResource "
                            "depth [%1d]. %s",
                            i, regions[i].dstOffset.z, copy_extent.depth, subresource_extent.depth,
                            validation_error_map[VALIDATION_ERROR_09c00132]);
        }

        // The union of all source regions, and the union of all destination regions, specified by the elements of regions,
        // must not overlap in memory
        // NOTE(review): j starts at 0, so regions[i] is also compared against itself — presumably
        // RegionIntersects() compares src-of-i against dst-of-j (not src/src), making i==j meaningful; confirm.
        if (src_image_state->image == dst_image_state->image) {
            for (uint32_t j = 0; j < region_count; j++) {
                if (RegionIntersects(&regions[i], &regions[j], src_image_state->createInfo.imageType)) {
                    std::stringstream ss;
                    ss << "vkCmdCopyImage: pRegions[" << i << "] src overlaps with pRegions[" << j << "].";
                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                                    HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_190000f8, "IMAGE", "%s. %s",
                                    ss.str().c_str(), validation_error_map[VALIDATION_ERROR_190000f8]);
                }
            }
        }
    }

    // The formats of src_image and dst_image must be compatible. Formats are considered compatible if their texel size in bytes
    // is the same between both formats. For example, VK_FORMAT_R8G8B8A8_UNORM is compatible with VK_FORMAT_R32_UINT because
    // because both texels are 4 bytes in size. Depth/stencil formats must match exactly.
    if (FormatIsDepthOrStencil(src_image_state->createInfo.format) || FormatIsDepthOrStencil(dst_image_state->createInfo.format)) {
        if (src_image_state->createInfo.format != dst_image_state->createInfo.format) {
            char const str[] = "vkCmdCopyImage called with unmatched source and dest image depth/stencil formats.";
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(command_buffer), __LINE__, DRAWSTATE_MISMATCHED_IMAGE_FORMAT, "IMAGE", str);
        }
    } else {
        size_t srcSize = FormatSize(src_image_state->createInfo.format);
        size_t destSize = FormatSize(dst_image_state->createInfo.format);
        if (srcSize != destSize) {
            char const str[] = "vkCmdCopyImage called with unmatched source and dest image format sizes.";
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_1900010e, "IMAGE", "%s. %s", str,
                            validation_error_map[VALIDATION_ERROR_1900010e]);
        }
    }

    // Source and dest image sample counts must match
    if (src_image_state->createInfo.samples != dst_image_state->createInfo.samples) {
        char const str[] = "vkCmdCopyImage() called on image pair with non-identical sample counts.";
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                        HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_19000110, "IMAGE", "%s %s", str,
                        validation_error_map[VALIDATION_ERROR_19000110]);
    }

    skip |= ValidateMemoryIsBoundToImage(device_data, src_image_state, "vkCmdCopyImage()", VALIDATION_ERROR_190000fe);
    skip |= ValidateMemoryIsBoundToImage(device_data, dst_image_state, "vkCmdCopyImage()", VALIDATION_ERROR_19000108);
    // Validate that SRC & DST images have correct usage flags set
    skip |= ValidateImageUsageFlags(device_data, src_image_state, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, true, VALIDATION_ERROR_190000fc,
                                    "vkCmdCopyImage()", "VK_IMAGE_USAGE_TRANSFER_SRC_BIT");
    skip |= ValidateImageUsageFlags(device_data, dst_image_state, VK_IMAGE_USAGE_TRANSFER_DST_BIT, true, VALIDATION_ERROR_19000106,
                                    "vkCmdCopyImage()", "VK_IMAGE_USAGE_TRANSFER_DST_BIT");
    skip |= ValidateCmdQueueFlags(device_data, cb_node, "vkCmdCopyImage()",
                                  VK_QUEUE_TRANSFER_BIT | VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT, VALIDATION_ERROR_19002415);
    skip |= ValidateCmd(device_data, cb_node, CMD_COPYIMAGE, "vkCmdCopyImage()");
    skip |= insideRenderPass(device_data, cb_node, "vkCmdCopyImage()", VALIDATION_ERROR_19000017);
    // Finally verify each region's src/dst subresources are in (or transitionable to) the expected layouts
    bool hit_error = false;
    for (uint32_t i = 0; i < region_count; ++i) {
        skip |= VerifyImageLayout(device_data, cb_node, src_image_state, regions[i].srcSubresource, src_image_layout,
                                  VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, "vkCmdCopyImage()", VALIDATION_ERROR_19000102, &hit_error);
        skip |=
VerifyImageLayout(device_data, cb_node, dst_image_state, regions[i].dstSubresource, dst_image_layout, 1952 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, "vkCmdCopyImage()", VALIDATION_ERROR_1900010c, &hit_error); 1953 skip |= ValidateCopyImageTransferGranularityRequirements(device_data, cb_node, src_image_state, dst_image_state, 1954 ®ions[i], i, "vkCmdCopyImage()"); 1955 } 1956 1957 return skip; 1958} 1959 1960void PreCallRecordCmdCopyImage(layer_data *device_data, GLOBAL_CB_NODE *cb_node, IMAGE_STATE *src_image_state, 1961 IMAGE_STATE *dst_image_state, uint32_t region_count, const VkImageCopy *regions, 1962 VkImageLayout src_image_layout, VkImageLayout dst_image_layout) { 1963 // Make sure that all image slices are updated to correct layout 1964 for (uint32_t i = 0; i < region_count; ++i) { 1965 SetImageLayout(device_data, cb_node, src_image_state, regions[i].srcSubresource, src_image_layout); 1966 SetImageLayout(device_data, cb_node, dst_image_state, regions[i].dstSubresource, dst_image_layout); 1967 } 1968 // Update bindings between images and cmd buffer 1969 AddCommandBufferBindingImage(device_data, cb_node, src_image_state); 1970 AddCommandBufferBindingImage(device_data, cb_node, dst_image_state); 1971 std::function<bool()> function = [=]() { return ValidateImageMemoryIsValid(device_data, src_image_state, "vkCmdCopyImage()"); }; 1972 cb_node->validate_functions.push_back(function); 1973 function = [=]() { 1974 SetImageMemoryValid(device_data, dst_image_state, true); 1975 return false; 1976 }; 1977 cb_node->validate_functions.push_back(function); 1978 core_validation::UpdateCmdBufferLastCmd(cb_node, CMD_COPYIMAGE); 1979} 1980 1981// Returns true if sub_rect is entirely contained within rect 1982static inline bool ContainsRect(VkRect2D rect, VkRect2D sub_rect) { 1983 if ((sub_rect.offset.x < rect.offset.x) || (sub_rect.offset.x + sub_rect.extent.width > rect.offset.x + rect.extent.width) || 1984 (sub_rect.offset.y < rect.offset.y) || (sub_rect.offset.y + 
sub_rect.extent.height > rect.offset.y + rect.extent.height)) 1985 return false; 1986 return true; 1987} 1988 1989bool PreCallValidateCmdClearAttachments(layer_data *device_data, VkCommandBuffer commandBuffer, uint32_t attachmentCount, 1990 const VkClearAttachment *pAttachments, uint32_t rectCount, const VkClearRect *pRects) { 1991 GLOBAL_CB_NODE *cb_node = GetCBNode(device_data, commandBuffer); 1992 const debug_report_data *report_data = core_validation::GetReportData(device_data); 1993 1994 bool skip = false; 1995 if (cb_node) { 1996 skip |= ValidateCmdQueueFlags(device_data, cb_node, "vkCmdClearAttachments()", VK_QUEUE_GRAPHICS_BIT, 1997 VALIDATION_ERROR_18602415); 1998 skip |= ValidateCmd(device_data, cb_node, CMD_CLEARATTACHMENTS, "vkCmdClearAttachments()"); 1999 core_validation::UpdateCmdBufferLastCmd(cb_node, CMD_CLEARATTACHMENTS); 2000 // Warn if this is issued prior to Draw Cmd and clearing the entire attachment 2001 if (!cb_node->hasDrawCmd && (cb_node->activeRenderPassBeginInfo.renderArea.extent.width == pRects[0].rect.extent.width) && 2002 (cb_node->activeRenderPassBeginInfo.renderArea.extent.height == pRects[0].rect.extent.height)) { 2003 // There are times where app needs to use ClearAttachments (generally when reusing a buffer inside of a render pass) 2004 // This warning should be made more specific. It'd be best to avoid triggering this test if it's a use that must call 2005 // CmdClearAttachments. 2006 skip |= 2007 log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2008 HandleToUint64(commandBuffer), 0, DRAWSTATE_CLEAR_CMD_BEFORE_DRAW, "DS", 2009 "vkCmdClearAttachments() issued on command buffer object 0x%p prior to any Draw Cmds." 
2010 " It is recommended you use RenderPass LOAD_OP_CLEAR on Attachments prior to any Draw.", 2011 commandBuffer); 2012 } 2013 skip |= outsideRenderPass(device_data, cb_node, "vkCmdClearAttachments()", VALIDATION_ERROR_18600017); 2014 } 2015 2016 // Validate that attachment is in reference list of active subpass 2017 if (cb_node->activeRenderPass) { 2018 const VkRenderPassCreateInfo *renderpass_create_info = cb_node->activeRenderPass->createInfo.ptr(); 2019 const VkSubpassDescription *subpass_desc = &renderpass_create_info->pSubpasses[cb_node->activeSubpass]; 2020 auto framebuffer = GetFramebufferState(device_data, cb_node->activeFramebuffer); 2021 2022 for (uint32_t i = 0; i < attachmentCount; i++) { 2023 auto clear_desc = &pAttachments[i]; 2024 VkImageView image_view = VK_NULL_HANDLE; 2025 2026 if (0 == clear_desc->aspectMask) { 2027 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2028 HandleToUint64(commandBuffer), __LINE__, VALIDATION_ERROR_01c00c03, "IMAGE", "%s", 2029 validation_error_map[VALIDATION_ERROR_01c00c03]); 2030 } else if (clear_desc->aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) { 2031 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2032 HandleToUint64(commandBuffer), __LINE__, VALIDATION_ERROR_01c00028, "IMAGE", "%s", 2033 validation_error_map[VALIDATION_ERROR_01c00028]); 2034 } else if (clear_desc->aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) { 2035 if (clear_desc->colorAttachment >= subpass_desc->colorAttachmentCount) { 2036 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2037 HandleToUint64(commandBuffer), __LINE__, VALIDATION_ERROR_1860001e, "DS", 2038 "vkCmdClearAttachments() color attachment index %d out of range for active subpass %d. 
%s", 2039 clear_desc->colorAttachment, cb_node->activeSubpass, 2040 validation_error_map[VALIDATION_ERROR_1860001e]); 2041 } else if (subpass_desc->pColorAttachments[clear_desc->colorAttachment].attachment == VK_ATTACHMENT_UNUSED) { 2042 skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, 2043 VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, HandleToUint64(commandBuffer), __LINE__, 2044 DRAWSTATE_MISSING_ATTACHMENT_REFERENCE, "DS", 2045 "vkCmdClearAttachments() color attachment index %d is VK_ATTACHMENT_UNUSED; ignored.", 2046 clear_desc->colorAttachment); 2047 } else { 2048 image_view = framebuffer->createInfo 2049 .pAttachments[subpass_desc->pColorAttachments[clear_desc->colorAttachment].attachment]; 2050 } 2051 if ((clear_desc->aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) || 2052 (clear_desc->aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT)) { 2053 char const str[] = 2054 "vkCmdClearAttachments aspectMask [%d] must set only VK_IMAGE_ASPECT_COLOR_BIT of a color attachment. %s"; 2055 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2056 HandleToUint64(commandBuffer), __LINE__, VALIDATION_ERROR_01c00026, "IMAGE", str, i, 2057 validation_error_map[VALIDATION_ERROR_01c00026]); 2058 } 2059 } else { // Must be depth and/or stencil 2060 if (((clear_desc->aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) != VK_IMAGE_ASPECT_DEPTH_BIT) && 2061 ((clear_desc->aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) != VK_IMAGE_ASPECT_STENCIL_BIT)) { 2062 char const str[] = "vkCmdClearAttachments aspectMask [%d] is not a valid combination of bits. 
%s"; 2063 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2064 HandleToUint64(commandBuffer), __LINE__, VALIDATION_ERROR_01c00c01, "IMAGE", str, i, 2065 validation_error_map[VALIDATION_ERROR_01c00c01]); 2066 } 2067 if (!subpass_desc->pDepthStencilAttachment || 2068 (subpass_desc->pDepthStencilAttachment->attachment == VK_ATTACHMENT_UNUSED)) { 2069 skip |= log_msg( 2070 report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2071 HandleToUint64(commandBuffer), __LINE__, DRAWSTATE_MISSING_ATTACHMENT_REFERENCE, "DS", 2072 "vkCmdClearAttachments() depth/stencil clear with no depth/stencil attachment in subpass; ignored"); 2073 } else { 2074 image_view = framebuffer->createInfo.pAttachments[subpass_desc->pDepthStencilAttachment->attachment]; 2075 } 2076 } 2077 if (image_view) { 2078 auto image_view_state = GetImageViewState(device_data, image_view); 2079 for (uint32_t j = 0; j < rectCount; j++) { 2080 // The rectangular region specified by a given element of pRects must be contained within the render area of 2081 // the current render pass instance 2082 // TODO: This check should be moved to CmdExecuteCommands or QueueSubmit to cover secondary CB cases 2083 if ((cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_PRIMARY) && 2084 (false == ContainsRect(cb_node->activeRenderPassBeginInfo.renderArea, pRects[j].rect))) { 2085 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2086 HandleToUint64(commandBuffer), __LINE__, VALIDATION_ERROR_18600020, "DS", 2087 "vkCmdClearAttachments(): The area defined by pRects[%d] is not contained in the area of " 2088 "the current render pass instance. 
%s", 2089 j, validation_error_map[VALIDATION_ERROR_18600020]); 2090 } 2091 // The layers specified by a given element of pRects must be contained within every attachment that 2092 // pAttachments refers to 2093 auto attachment_layer_count = image_view_state->create_info.subresourceRange.layerCount; 2094 if ((pRects[j].baseArrayLayer >= attachment_layer_count) || 2095 (pRects[j].baseArrayLayer + pRects[j].layerCount > attachment_layer_count)) { 2096 skip |= 2097 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2098 HandleToUint64(commandBuffer), __LINE__, VALIDATION_ERROR_18600022, "DS", 2099 "vkCmdClearAttachments(): The layers defined in pRects[%d] are not contained in the layers of " 2100 "pAttachment[%d]. %s", 2101 j, i, validation_error_map[VALIDATION_ERROR_18600022]); 2102 } 2103 } 2104 } 2105 } 2106 } 2107 return skip; 2108} 2109 2110bool PreCallValidateCmdResolveImage(layer_data *device_data, GLOBAL_CB_NODE *cb_node, IMAGE_STATE *src_image_state, 2111 IMAGE_STATE *dst_image_state, uint32_t regionCount, const VkImageResolve *pRegions) { 2112 const debug_report_data *report_data = core_validation::GetReportData(device_data); 2113 bool skip = false; 2114 if (cb_node && src_image_state && dst_image_state) { 2115 skip |= ValidateMemoryIsBoundToImage(device_data, src_image_state, "vkCmdResolveImage()", VALIDATION_ERROR_1c800200); 2116 skip |= ValidateMemoryIsBoundToImage(device_data, dst_image_state, "vkCmdResolveImage()", VALIDATION_ERROR_1c800204); 2117 skip |= 2118 ValidateCmdQueueFlags(device_data, cb_node, "vkCmdResolveImage()", VK_QUEUE_GRAPHICS_BIT, VALIDATION_ERROR_1c802415); 2119 skip |= ValidateCmd(device_data, cb_node, CMD_RESOLVEIMAGE, "vkCmdResolveImage()"); 2120 skip |= insideRenderPass(device_data, cb_node, "vkCmdResolveImage()", VALIDATION_ERROR_1c800017); 2121 2122 // For each region, the number of layers in the image subresource should not be zero 2123 // For each region, src and dest image 
// aspect must be color only
        // (continuation of PreCallValidateCmdResolveImage's per-region checks)
        for (uint32_t i = 0; i < regionCount; i++) {
            // Zero layer counts are reported as warnings, not errors
            if (pRegions[i].srcSubresource.layerCount == 0) {
                char const str[] = "vkCmdResolveImage: number of layers in source subresource is zero";
                skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                                HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_MISMATCHED_IMAGE_ASPECT, "IMAGE", str);
            }
            if (pRegions[i].dstSubresource.layerCount == 0) {
                char const str[] = "vkCmdResolveImage: number of layers in destination subresource is zero";
                skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                                HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_MISMATCHED_IMAGE_ASPECT, "IMAGE", str);
            }
            // Source and destination layer counts must match
            if (pRegions[i].srcSubresource.layerCount != pRegions[i].dstSubresource.layerCount) {
                skip |= log_msg(
                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                    HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_0a200216, "IMAGE",
                    "vkCmdResolveImage: layerCount in source and destination subresource of pRegions[%d] does not match. %s", i,
                    validation_error_map[VALIDATION_ERROR_0a200216]);
            }
            // Resolve operates on color aspects only
            if ((pRegions[i].srcSubresource.aspectMask != VK_IMAGE_ASPECT_COLOR_BIT) ||
                (pRegions[i].dstSubresource.aspectMask != VK_IMAGE_ASPECT_COLOR_BIT)) {
                char const str[] =
                    "vkCmdResolveImage: src and dest aspectMasks for each region must specify only VK_IMAGE_ASPECT_COLOR_BIT";
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                                HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_0a200214, "IMAGE", "%s. %s", str,
                                validation_error_map[VALIDATION_ERROR_0a200214]);
            }
        }

        if (src_image_state->createInfo.format != dst_image_state->createInfo.format) {
            char const str[] = "vkCmdResolveImage called with unmatched source and dest formats.";
            skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_MISMATCHED_IMAGE_FORMAT, "IMAGE", str);
        }
        if (src_image_state->createInfo.imageType != dst_image_state->createInfo.imageType) {
            char const str[] = "vkCmdResolveImage called with unmatched source and dest image types.";
            skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_MISMATCHED_IMAGE_TYPE, "IMAGE", str);
        }
        // Source must be multisampled; destination must be single-sampled
        if (src_image_state->createInfo.samples == VK_SAMPLE_COUNT_1_BIT) {
            char const str[] = "vkCmdResolveImage called with source sample count less than 2.";
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_1c800202, "IMAGE", "%s. %s", str,
                            validation_error_map[VALIDATION_ERROR_1c800202]);
        }
        if (dst_image_state->createInfo.samples != VK_SAMPLE_COUNT_1_BIT) {
            char const str[] = "vkCmdResolveImage called with dest sample count greater than 1.";
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_1c800206, "IMAGE", "%s. %s", str,
                            validation_error_map[VALIDATION_ERROR_1c800206]);
        }
        // TODO: Need to validate image layouts, which will include layout validation for shared presentable images
    } else {
        assert(0);
    }
    return skip;
}

// Record-side bookkeeping for vkCmdResolveImage: binds both images to the command buffer and queues
// deferred submit-time checks (src memory must be valid; dst memory becomes valid).
void PreCallRecordCmdResolveImage(layer_data *device_data, GLOBAL_CB_NODE *cb_node, IMAGE_STATE *src_image_state,
                                  IMAGE_STATE *dst_image_state) {
    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(device_data, cb_node, src_image_state);
    AddCommandBufferBindingImage(device_data, cb_node, dst_image_state);

    // Captured by value so the image states survive until submit-time validation
    std::function<bool()> function = [=]() {
        return ValidateImageMemoryIsValid(device_data, src_image_state, "vkCmdResolveImage()");
    };
    cb_node->validate_functions.push_back(function);
    function = [=]() {
        SetImageMemoryValid(device_data, dst_image_state, true);
        return false;
    };
    cb_node->validate_functions.push_back(function);
    core_validation::UpdateCmdBufferLastCmd(cb_node, CMD_RESOLVEIMAGE);
}

// Validation for vkCmdBlitImage (continues past this excerpt): sample counts, memory binding,
// usage flags, queue capability, render-pass state, then per-region offset/subresource checks.
bool PreCallValidateCmdBlitImage(layer_data *device_data, GLOBAL_CB_NODE *cb_node, IMAGE_STATE *src_image_state,
                                 IMAGE_STATE *dst_image_state, uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter) {
    const debug_report_data *report_data = core_validation::GetReportData(device_data);

    bool skip = false;
    if (cb_node && src_image_state && dst_image_state) {
        skip |= ValidateImageSampleCount(device_data, src_image_state, VK_SAMPLE_COUNT_1_BIT, "vkCmdBlitImage(): srcImage",
                                         VALIDATION_ERROR_184001d2);
        skip |= ValidateImageSampleCount(device_data, dst_image_state, VK_SAMPLE_COUNT_1_BIT, "vkCmdBlitImage(): dstImage",
                                         VALIDATION_ERROR_184001d4);
        skip |= ValidateMemoryIsBoundToImage(device_data, src_image_state, "vkCmdBlitImage()", VALIDATION_ERROR_184001b8);
        skip |= ValidateMemoryIsBoundToImage(device_data, dst_image_state, "vkCmdBlitImage()",
                                             VALIDATION_ERROR_184001c2);
        // NOTE(review): continuation of PreCallValidateCmdBlitImage(); the function's tail
        // (remaining dst-offset checks, filter validation, closing braces) lies beyond this chunk.
        skip |= ValidateImageUsageFlags(device_data, src_image_state, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, true,
                                        VALIDATION_ERROR_184001b6, "vkCmdBlitImage()", "VK_IMAGE_USAGE_TRANSFER_SRC_BIT");
        skip |= ValidateImageUsageFlags(device_data, dst_image_state, VK_IMAGE_USAGE_TRANSFER_DST_BIT, true,
                                        VALIDATION_ERROR_184001c0, "vkCmdBlitImage()", "VK_IMAGE_USAGE_TRANSFER_DST_BIT");
        skip |= ValidateCmdQueueFlags(device_data, cb_node, "vkCmdBlitImage()", VK_QUEUE_GRAPHICS_BIT, VALIDATION_ERROR_18402415);
        skip |= ValidateCmd(device_data, cb_node, CMD_BLITIMAGE, "vkCmdBlitImage()");
        skip |= insideRenderPass(device_data, cb_node, "vkCmdBlitImage()", VALIDATION_ERROR_18400017);
        // TODO: Need to validate image layouts, which will include layout validation for shared presentable images

        for (uint32_t i = 0; i < regionCount; i++) {
            VkImageBlit rgn = pRegions[i];

            // Warn for zero-sized regions
            if ((rgn.srcOffsets[0].x == rgn.srcOffsets[1].x) || (rgn.srcOffsets[0].y == rgn.srcOffsets[1].y) ||
                (rgn.srcOffsets[0].z == rgn.srcOffsets[1].z)) {
                std::stringstream ss;
                ss << "vkCmdBlitImage: pRegions[" << i << "].srcOffsets specify a zero-volume area.";
                skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                                HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_INVALID_EXTENTS, "IMAGE", "%s",
                                ss.str().c_str());
            }
            if ((rgn.dstOffsets[0].x == rgn.dstOffsets[1].x) || (rgn.dstOffsets[0].y == rgn.dstOffsets[1].y) ||
                (rgn.dstOffsets[0].z == rgn.dstOffsets[1].z)) {
                std::stringstream ss;
                ss << "vkCmdBlitImage: pRegions[" << i << "].dstOffsets specify a zero-volume area.";
                skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                                HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_INVALID_EXTENTS, "IMAGE", "%s",
                                ss.str().c_str());
            }
            if (rgn.srcSubresource.layerCount == 0) {
                char const str[] = "vkCmdBlitImage: number of layers in source subresource is zero";
                skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                                HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_MISMATCHED_IMAGE_ASPECT, "IMAGE", str);
            }
            if (rgn.dstSubresource.layerCount == 0) {
                char const str[] = "vkCmdBlitImage: number of layers in destination subresource is zero";
                skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                                HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_MISMATCHED_IMAGE_ASPECT, "IMAGE", str);
            }

            // Check that src/dst layercounts match
            if (rgn.srcSubresource.layerCount != rgn.dstSubresource.layerCount) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                                HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_09a001de, "IMAGE",
                                "vkCmdBlitImage: layerCount in source and destination subresource of pRegions[%d] does not match. %s",
                                i, validation_error_map[VALIDATION_ERROR_09a001de]);
            }

            if (rgn.srcSubresource.aspectMask != rgn.dstSubresource.aspectMask) {
                skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                                HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_09a001dc, "IMAGE",
                                "vkCmdBlitImage: aspectMask members for pRegion[%d] do not match. %s", i,
                                validation_error_map[VALIDATION_ERROR_09a001dc]);
            }

            // Validate source image offsets
            VkExtent3D src_extent = GetImageSubresourceExtent(src_image_state, &(rgn.srcSubresource));
            // 1D images: y offsets must be exactly (0, 1)
            if (VK_IMAGE_TYPE_1D == src_image_state->createInfo.imageType) {
                if ((0 != rgn.srcOffsets[0].y) || (1 != rgn.srcOffsets[1].y)) {
                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                                    HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_09a001ea, "IMAGE",
                                    "vkCmdBlitImage: region [%d], source image of type VK_IMAGE_TYPE_1D with srcOffset[].y values "
                                    "of (%1d, %1d). These must be (0, 1). %s",
                                    i, rgn.srcOffsets[0].y, rgn.srcOffsets[1].y, validation_error_map[VALIDATION_ERROR_09a001ea]);
                }
            }

            // 1D and 2D images: z offsets must be exactly (0, 1)
            if ((VK_IMAGE_TYPE_1D == src_image_state->createInfo.imageType) ||
                (VK_IMAGE_TYPE_2D == src_image_state->createInfo.imageType)) {
                if ((0 != rgn.srcOffsets[0].z) || (1 != rgn.srcOffsets[1].z)) {
                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                                    HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_09a001ee, "IMAGE",
                                    "vkCmdBlitImage: region [%d], source image of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D with "
                                    "srcOffset[].z values of (%1d, %1d). These must be (0, 1). %s",
                                    i, rgn.srcOffsets[0].z, rgn.srcOffsets[1].z, validation_error_map[VALIDATION_ERROR_09a001ee]);
                }
            }

            // Both endpoints of every src offset pair must lie within the subresource extent
            if ((rgn.srcOffsets[0].x < 0) || (rgn.srcOffsets[0].x > static_cast<int32_t>(src_extent.width)) ||
                (rgn.srcOffsets[1].x < 0) || (rgn.srcOffsets[1].x > static_cast<int32_t>(src_extent.width))) {
                skip |= log_msg(
                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                    HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_09a001e6, "IMAGE",
                    "vkCmdBlitImage: region [%d] srcOffset[].x values (%1d, %1d) exceed srcSubresource width extent (%1d). %s", i,
                    rgn.srcOffsets[0].x, rgn.srcOffsets[1].x, src_extent.width, validation_error_map[VALIDATION_ERROR_09a001e6]);
            }
            if ((rgn.srcOffsets[0].y < 0) || (rgn.srcOffsets[0].y > static_cast<int32_t>(src_extent.height)) ||
                (rgn.srcOffsets[1].y < 0) || (rgn.srcOffsets[1].y > static_cast<int32_t>(src_extent.height))) {
                skip |= log_msg(
                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                    HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_09a001e8, "IMAGE",
                    "vkCmdBlitImage: region [%d] srcOffset[].y values (%1d, %1d) exceed srcSubresource height extent (%1d). %s", i,
                    rgn.srcOffsets[0].y, rgn.srcOffsets[1].y, src_extent.height, validation_error_map[VALIDATION_ERROR_09a001e8]);
            }
            if ((rgn.srcOffsets[0].z < 0) || (rgn.srcOffsets[0].z > static_cast<int32_t>(src_extent.depth)) ||
                (rgn.srcOffsets[1].z < 0) || (rgn.srcOffsets[1].z > static_cast<int32_t>(src_extent.depth))) {
                skip |= log_msg(
                    report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                    HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_09a001ec, "IMAGE",
                    "vkCmdBlitImage: region [%d] srcOffset[].z values (%1d, %1d) exceed srcSubresource depth extent (%1d). %s", i,
                    rgn.srcOffsets[0].z, rgn.srcOffsets[1].z, src_extent.depth, validation_error_map[VALIDATION_ERROR_09a001ec]);
            }

            // Validate dest image offsets
            VkExtent3D dst_extent = GetImageSubresourceExtent(dst_image_state, &(rgn.dstSubresource));
            if (VK_IMAGE_TYPE_1D == dst_image_state->createInfo.imageType) {
                if ((0 != rgn.dstOffsets[0].y) || (1 != rgn.dstOffsets[1].y)) {
                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                                    HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_09a001f4, "IMAGE",
                                    "vkCmdBlitImage: region [%d], dest image of type VK_IMAGE_TYPE_1D with dstOffset[].y values of "
                                    "(%1d, %1d). These must be (0, 1). %s",
                                    i, rgn.dstOffsets[0].y, rgn.dstOffsets[1].y, validation_error_map[VALIDATION_ERROR_09a001f4]);
                }
            }

            if ((VK_IMAGE_TYPE_1D == dst_image_state->createInfo.imageType) ||
                (VK_IMAGE_TYPE_2D == dst_image_state->createInfo.imageType)) {
                if ((0 != rgn.dstOffsets[0].z) || (1 != rgn.dstOffsets[1].z)) {
                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                                    HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_09a001f8, "IMAGE",
                                    "vkCmdBlitImage: region [%d], dest image of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D with "
                                    "dstOffset[].z values of (%1d, %1d). These must be (0, 1).
%s", 2334 i, rgn.dstOffsets[0].z, rgn.dstOffsets[1].z, validation_error_map[VALIDATION_ERROR_09a001f8]); 2335 } 2336 } 2337 2338 if ((rgn.dstOffsets[0].x < 0) || (rgn.dstOffsets[0].x > static_cast<int32_t>(dst_extent.width)) || 2339 (rgn.dstOffsets[1].x < 0) || (rgn.dstOffsets[1].x > static_cast<int32_t>(dst_extent.width))) { 2340 skip |= log_msg( 2341 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2342 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_09a001f0, "IMAGE", 2343 "vkCmdBlitImage: region [%d] dstOffset[].x values (%1d, %1d) exceed dstSubresource width extent (%1d). %s", i, 2344 rgn.dstOffsets[0].x, rgn.dstOffsets[1].x, dst_extent.width, validation_error_map[VALIDATION_ERROR_09a001f0]); 2345 } 2346 if ((rgn.dstOffsets[0].y < 0) || (rgn.dstOffsets[0].y > static_cast<int32_t>(dst_extent.height)) || 2347 (rgn.dstOffsets[1].y < 0) || (rgn.dstOffsets[1].y > static_cast<int32_t>(dst_extent.height))) { 2348 skip |= log_msg( 2349 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2350 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_09a001f2, "IMAGE", 2351 "vkCmdBlitImage: region [%d] dstOffset[].y values (%1d, %1d) exceed dstSubresource height extent (%1d). %s", i, 2352 rgn.dstOffsets[0].y, rgn.dstOffsets[1].y, dst_extent.height, validation_error_map[VALIDATION_ERROR_09a001f2]); 2353 } 2354 if ((rgn.dstOffsets[0].z < 0) || (rgn.dstOffsets[0].z > static_cast<int32_t>(dst_extent.depth)) || 2355 (rgn.dstOffsets[1].z < 0) || (rgn.dstOffsets[1].z > static_cast<int32_t>(dst_extent.depth))) { 2356 skip |= log_msg( 2357 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2358 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_09a001f6, "IMAGE", 2359 "vkCmdBlitImage: region [%d] dstOffset[].z values (%1d, %1d) exceed dstSubresource depth extent (%1d). 
%s", i, 2360 rgn.dstOffsets[0].z, rgn.dstOffsets[1].z, dst_extent.depth, validation_error_map[VALIDATION_ERROR_09a001f6]); 2361 } 2362 } 2363 2364 VkFormat src_format = src_image_state->createInfo.format; 2365 VkFormat dst_format = dst_image_state->createInfo.format; 2366 2367 // Validate consistency for unsigned formats 2368 if (FormatIsUInt(src_format) != FormatIsUInt(dst_format)) { 2369 std::stringstream ss; 2370 ss << "vkCmdBlitImage: If one of srcImage and dstImage images has unsigned integer format, " 2371 << "the other one must also have unsigned integer format. " 2372 << "Source format is " << string_VkFormat(src_format) << " Destination format is " << string_VkFormat(dst_format); 2373 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2374 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_184001cc, "IMAGE", "%s. %s", 2375 ss.str().c_str(), validation_error_map[VALIDATION_ERROR_184001cc]); 2376 } 2377 2378 // Validate consistency for signed formats 2379 if (FormatIsSInt(src_format) != FormatIsSInt(dst_format)) { 2380 std::stringstream ss; 2381 ss << "vkCmdBlitImage: If one of srcImage and dstImage images has signed integer format, " 2382 << "the other one must also have signed integer format. " 2383 << "Source format is " << string_VkFormat(src_format) << " Destination format is " << string_VkFormat(dst_format); 2384 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2385 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_184001ca, "IMAGE", "%s. 
%s", 2386 ss.str().c_str(), validation_error_map[VALIDATION_ERROR_184001ca]); 2387 } 2388 2389 // Validate aspect bits and formats for depth/stencil images 2390 if (FormatIsDepthOrStencil(src_format) || FormatIsDepthOrStencil(dst_format)) { 2391 if (src_format != dst_format) { 2392 std::stringstream ss; 2393 ss << "vkCmdBlitImage: If one of srcImage and dstImage images has a format of depth, stencil or depth " 2394 << "stencil, the other one must have exactly the same format. " 2395 << "Source format is " << string_VkFormat(src_format) << " Destination format is " 2396 << string_VkFormat(dst_format); 2397 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2398 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_184001ce, "IMAGE", "%s. %s", 2399 ss.str().c_str(), validation_error_map[VALIDATION_ERROR_184001ce]); 2400 } 2401 2402#if 0 // TODO: Cannot find VU statements or spec language for these in CmdBlitImage. Verify or remove. 
2403 for (uint32_t i = 0; i < regionCount; i++) { 2404 VkImageAspectFlags srcAspect = pRegions[i].srcSubresource.aspectMask; 2405 2406 if (FormatIsDepthAndStencil(src_format)) { 2407 if ((srcAspect != VK_IMAGE_ASPECT_DEPTH_BIT) && (srcAspect != VK_IMAGE_ASPECT_STENCIL_BIT)) { 2408 std::stringstream ss; 2409 ss << "vkCmdBlitImage: Combination depth/stencil image formats must have only one of " 2410 "VK_IMAGE_ASPECT_DEPTH_BIT " 2411 << "and VK_IMAGE_ASPECT_STENCIL_BIT set in srcImage and dstImage"; 2412 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2413 HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_INVALID_IMAGE_ASPECT, "IMAGE", 2414 "%s", ss.str().c_str()); 2415 } 2416 } else if (FormatIsStencilOnly(src_format)) { 2417 if (srcAspect != VK_IMAGE_ASPECT_STENCIL_BIT) { 2418 std::stringstream ss; 2419 ss << "vkCmdBlitImage: Stencil-only image formats must have only the VK_IMAGE_ASPECT_STENCIL_BIT " 2420 << "set in both the srcImage and dstImage"; 2421 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2422 HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_INVALID_IMAGE_ASPECT, "IMAGE", 2423 "%s", ss.str().c_str()); 2424 } 2425 } else if (FormatIsDepthOnly(src_format)) { 2426 if (srcAspect != VK_IMAGE_ASPECT_DEPTH_BIT) { 2427 std::stringstream ss; 2428 ss << "vkCmdBlitImage: Depth-only image formats must have only the VK_IMAGE_ASPECT_DEPTH " 2429 << "set in both the srcImage and dstImage"; 2430 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2431 HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_INVALID_IMAGE_ASPECT, "IMAGE", 2432 "%s", ss.str().c_str()); 2433 } 2434 } 2435 } 2436#endif 2437 } // Depth/Stencil 2438 2439 // Validate filter 2440 if (FormatIsDepthOrStencil(src_format) && (filter != VK_FILTER_NEAREST)) { 2441 std::stringstream ss; 2442 ss << 
"vkCmdBlitImage: If the format of srcImage is a depth, stencil, or depth stencil "
           << "then filter must be VK_FILTER_NEAREST.";
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                            HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_184001d0, "IMAGE", "%s. %s",
                            ss.str().c_str(), validation_error_map[VALIDATION_ERROR_184001d0]);
        }
    } else {
        assert(0);
    }
    return skip;
}

// Record-phase handler for vkCmdBlitImage: updates command-buffer state only (no validation performed here).
// Binds both images to the command buffer and queues deferred memory-validity checks to run at submit time.
void PreCallRecordCmdBlitImage(layer_data *device_data, GLOBAL_CB_NODE *cb_node, IMAGE_STATE *src_image_state,
                               IMAGE_STATE *dst_image_state) {
    // Update bindings between images and cmd buffer
    AddCommandBufferBindingImage(device_data, cb_node, src_image_state);
    AddCommandBufferBindingImage(device_data, cb_node, dst_image_state);

    // Deferred check: the blit reads src, so its memory must be valid when the cmd buffer executes
    std::function<bool()> function = [=]() { return ValidateImageMemoryIsValid(device_data, src_image_state, "vkCmdBlitImage()"); };
    cb_node->validate_functions.push_back(function);
    // Deferred update: the blit writes dst, so mark its memory valid at execution time
    function = [=]() {
        SetImageMemoryValid(device_data, dst_image_state, true);
        return false;
    };
    cb_node->validate_functions.push_back(function);
    core_validation::UpdateCmdBufferLastCmd(cb_node, CMD_BLITIMAGE);
}

// This validates that the initial layout specified in the command buffer for the IMAGE is the same as the
// global IMAGE layout. For each image/subresource the command buffer touches, an error is reported when the
// recorded initial layout disagrees with the layout tracked in 'imageLayoutMap' (unless the recorded initial
// layout is UNDEFINED, which matches anything). The caller's map is then updated with the cmd buffer's final layouts.
bool ValidateCmdBufImageLayouts(layer_data *device_data, GLOBAL_CB_NODE *pCB,
                                std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE> &imageLayoutMap) {
    bool skip = false;
    const debug_report_data *report_data = core_validation::GetReportData(device_data);
    for (auto cb_image_data : pCB->imageLayoutMap) {
        VkImageLayout imageLayout;

        if (FindLayout(imageLayoutMap, cb_image_data.first, imageLayout)) {
            if (cb_image_data.second.initialLayout == VK_IMAGE_LAYOUT_UNDEFINED) {
                // TODO: Set memory invalid which is in mem_tracker currently
            } else if (imageLayout != cb_image_data.second.initialLayout) {
                // Layout mismatch: report with subresource detail when we have it
                if (cb_image_data.first.hasSubresource) {
                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                                    HandleToUint64(pCB->commandBuffer), __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
                                    "Cannot submit cmd buffer using image (0x%" PRIx64
                                    ") [sub-resource: aspectMask 0x%X array layer %u, mip level %u], "
                                    "with layout %s when first use is %s.",
                                    HandleToUint64(cb_image_data.first.image), cb_image_data.first.subresource.aspectMask,
                                    cb_image_data.first.subresource.arrayLayer, cb_image_data.first.subresource.mipLevel,
                                    string_VkImageLayout(imageLayout), string_VkImageLayout(cb_image_data.second.initialLayout));
                } else {
                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                                    HandleToUint64(pCB->commandBuffer), __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
                                    "Cannot submit cmd buffer using image (0x%" PRIx64
                                    ") with layout %s when "
                                    "first use is %s.",
                                    HandleToUint64(cb_image_data.first.image), string_VkImageLayout(imageLayout),
                                    string_VkImageLayout(cb_image_data.second.initialLayout));
                }
            }
            // Propagate this cmd buffer's final layout into the caller-supplied map
            SetLayout(imageLayoutMap, cb_image_data.first, cb_image_data.second.layout);
        }
    }
    return skip;
}

// Commit the final image layouts recorded in this command buffer into the device-global layout tracking.
void UpdateCmdBufImageLayouts(layer_data *device_data, GLOBAL_CB_NODE *pCB) {
    for (auto cb_image_data : pCB->imageLayoutMap) {
        VkImageLayout imageLayout;
        // NOTE(review): the layout returned by FindGlobalLayout is never read here -- looks vestigial; confirm
        // whether the call has required side effects before removing.
        FindGlobalLayout(device_data, cb_image_data.first, imageLayout);
        SetGlobalLayout(device_data, cb_image_data.first, cb_image_data.second.layout);
    }
}

// Print readable FlagBits in FlagMask
static std::string string_VkAccessFlags(VkAccessFlags accessMask) {
    std::string result;
    std::string separator;

    if (accessMask == 0) {
        result = "[None]";
    } else {
        result
= "["; 2526 for (auto i = 0; i < 32; i++) { 2527 if (accessMask & (1 << i)) { 2528 result = result + separator + string_VkAccessFlagBits((VkAccessFlagBits)(1 << i)); 2529 separator = " | "; 2530 } 2531 } 2532 result = result + "]"; 2533 } 2534 return result; 2535} 2536 2537// AccessFlags MUST have 'required_bit' set, and may have one or more of 'optional_bits' set. If required_bit is zero, accessMask 2538// must have at least one of 'optional_bits' set 2539// TODO: Add tracking to ensure that at least one barrier has been set for these layout transitions 2540static bool ValidateMaskBits(core_validation::layer_data *device_data, VkCommandBuffer cmdBuffer, const VkAccessFlags &accessMask, 2541 const VkImageLayout &layout, VkAccessFlags required_bit, VkAccessFlags optional_bits, 2542 const char *type) { 2543 const debug_report_data *report_data = core_validation::GetReportData(device_data); 2544 bool skip = false; 2545 2546 if ((accessMask & required_bit) || (!required_bit && (accessMask & optional_bits))) { 2547 if (accessMask & ~(required_bit | optional_bits)) { 2548 // TODO: Verify against Valid Use 2549 skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2550 HandleToUint64(cmdBuffer), __LINE__, DRAWSTATE_INVALID_BARRIER, "DS", 2551 "Additional bits in %s accessMask 0x%X %s are specified when layout is %s.", type, accessMask, 2552 string_VkAccessFlags(accessMask).c_str(), string_VkImageLayout(layout)); 2553 } 2554 } else { 2555 if (!required_bit) { 2556 skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2557 HandleToUint64(cmdBuffer), __LINE__, DRAWSTATE_INVALID_BARRIER, "DS", 2558 "%s AccessMask %d %s must contain at least one of access bits %d " 2559 "%s when layout is %s, unless the app has previously added a " 2560 "barrier for this transition.", 2561 type, accessMask, string_VkAccessFlags(accessMask).c_str(), optional_bits, 2562 
string_VkAccessFlags(optional_bits).c_str(), string_VkImageLayout(layout)); 2563 } else { 2564 std::string opt_bits; 2565 if (optional_bits != 0) { 2566 std::stringstream ss; 2567 ss << optional_bits; 2568 opt_bits = "and may have optional bits " + ss.str() + ' ' + string_VkAccessFlags(optional_bits); 2569 } 2570 skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2571 HandleToUint64(cmdBuffer), __LINE__, DRAWSTATE_INVALID_BARRIER, "DS", 2572 "%s AccessMask %d %s must have required access bit %d %s %s when " 2573 "layout is %s, unless the app has previously added a barrier for " 2574 "this transition.", 2575 type, accessMask, string_VkAccessFlags(accessMask).c_str(), required_bit, 2576 string_VkAccessFlags(required_bit).c_str(), opt_bits.c_str(), string_VkImageLayout(layout)); 2577 } 2578 } 2579 return skip; 2580} 2581 2582bool ValidateMaskBitsFromLayouts(core_validation::layer_data *device_data, VkCommandBuffer cmdBuffer, 2583 const VkAccessFlags &accessMask, const VkImageLayout &layout, const char *type) { 2584 const debug_report_data *report_data = core_validation::GetReportData(device_data); 2585 2586 bool skip = false; 2587 switch (layout) { 2588 case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL: { 2589 skip |= ValidateMaskBits(device_data, cmdBuffer, accessMask, layout, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, 2590 VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT, type); 2591 break; 2592 } 2593 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL: { 2594 skip |= ValidateMaskBits(device_data, cmdBuffer, accessMask, layout, VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT, 2595 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT, type); 2596 break; 2597 } 2598 case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL: { 2599 skip |= ValidateMaskBits(device_data, cmdBuffer, accessMask, layout, VK_ACCESS_TRANSFER_WRITE_BIT, 0, type); 2600 break; 2601 } 2602 case 
VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL: {
            skip |= ValidateMaskBits(
                device_data, cmdBuffer, accessMask, layout, 0,
                VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
                type);
            break;
        }
        case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL: {
            skip |= ValidateMaskBits(device_data, cmdBuffer, accessMask, layout, 0,
                                     VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_SHADER_READ_BIT, type);
            break;
        }
        case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL: {
            skip |= ValidateMaskBits(device_data, cmdBuffer, accessMask, layout, VK_ACCESS_TRANSFER_READ_BIT, 0, type);
            break;
        }
        case VK_IMAGE_LAYOUT_UNDEFINED: {
            // No access bits are expected when transitioning to UNDEFINED; warn if any are set
            if (accessMask != 0) {
                // TODO: Verify against Valid Use section spec
                skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                                HandleToUint64(cmdBuffer), __LINE__, DRAWSTATE_INVALID_BARRIER, "DS",
                                "Additional bits in %s accessMask 0x%X %s are specified when layout is %s.", type, accessMask,
                                string_VkAccessFlags(accessMask).c_str(), string_VkImageLayout(layout));
            }
            break;
        }
        case VK_IMAGE_LAYOUT_PRESENT_SRC_KHR:
            // Notes: QueuePresentKHR performs automatic visibility operations,
            // so the app is /NOT/ required to include VK_ACCESS_MEMORY_READ_BIT
            // when transitioning to this layout.
            //
            // When transitioning /from/ this layout, the application needs to
            // avoid only a WAR hazard -- any writes need to be ordered after
            // the PE's reads. There is no need for a memory dependency for this
            // case.
            // Intentionally fall through

        case VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR:
            // Todo -- shouldn't be valid unless extension is enabled
            // Intentionally fall through

        case VK_IMAGE_LAYOUT_GENERAL:
        default: { break; }
    }
    return skip;
}

// ValidateLayoutVsAttachmentDescription is a general function where we can validate various state associated with the
// VkAttachmentDescription structs that are used by the sub-passes of a renderpass. Initial check is to make sure that READ_ONLY
// layout attachments don't have CLEAR as their loadOp.
bool ValidateLayoutVsAttachmentDescription(const debug_report_data *report_data, const VkImageLayout first_layout,
                                           const uint32_t attachment, const VkAttachmentDescription &attachment_description) {
    bool skip = false;
    // Verify that initial loadOp on READ_ONLY attachments is not CLEAR
    if (attachment_description.loadOp == VK_ATTACHMENT_LOAD_OP_CLEAR) {
        if ((first_layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL) ||
            (first_layout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL)) {
            skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                            VALIDATION_ERROR_12200688, "DS", "Cannot clear attachment %d with invalid first layout %s. 
%s", 2661 attachment, string_VkImageLayout(first_layout), validation_error_map[VALIDATION_ERROR_12200688]); 2662 } 2663 } 2664 return skip; 2665} 2666 2667bool ValidateLayouts(core_validation::layer_data *device_data, VkDevice device, const VkRenderPassCreateInfo *pCreateInfo) { 2668 const debug_report_data *report_data = core_validation::GetReportData(device_data); 2669 bool skip = false; 2670 2671 // Track when we're observing the first use of an attachment 2672 std::vector<bool> attach_first_use(pCreateInfo->attachmentCount, true); 2673 for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) { 2674 const VkSubpassDescription &subpass = pCreateInfo->pSubpasses[i]; 2675 2676 // Check input attachments first, so we can detect first-use-as-input for VU #00349 2677 for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) { 2678 auto attach_index = subpass.pInputAttachments[j].attachment; 2679 if (attach_index == VK_ATTACHMENT_UNUSED) continue; 2680 2681 switch (subpass.pInputAttachments[j].layout) { 2682 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL: 2683 case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL: 2684 // These are ideal. 2685 break; 2686 2687 case VK_IMAGE_LAYOUT_GENERAL: 2688 // May not be optimal. TODO: reconsider this warning based on other constraints. 
2689 skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, 2690 VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS", 2691 "Layout for input attachment is GENERAL but should be READ_ONLY_OPTIMAL."); 2692 break; 2693 2694 default: 2695 // No other layouts are acceptable 2696 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, 2697 __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS", 2698 "Layout for input attachment is %s but can only be READ_ONLY_OPTIMAL or GENERAL.", 2699 string_VkImageLayout(subpass.pInputAttachments[j].layout)); 2700 } 2701 2702 VkImageLayout layout = subpass.pInputAttachments[j].layout; 2703 bool found_layout_mismatch = subpass.pDepthStencilAttachment && 2704 subpass.pDepthStencilAttachment->attachment == attach_index && 2705 subpass.pDepthStencilAttachment->layout != layout; 2706 for (uint32_t c = 0; !found_layout_mismatch && c < subpass.colorAttachmentCount; ++c) { 2707 found_layout_mismatch = 2708 (subpass.pColorAttachments[c].attachment == attach_index && subpass.pColorAttachments[c].layout != layout); 2709 } 2710 if (found_layout_mismatch) { 2711 skip |= log_msg( 2712 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 2713 VALIDATION_ERROR_140006ae, "DS", 2714 "CreateRenderPass: Subpass %u pInputAttachments[%u] (%u) has layout %u, but is also used as a depth/color " 2715 "attachment with a different layout. 
%s", 2716 i, j, attach_index, layout, validation_error_map[VALIDATION_ERROR_140006ae]); 2717 } 2718 2719 if (attach_first_use[attach_index]) { 2720 skip |= ValidateLayoutVsAttachmentDescription(report_data, subpass.pInputAttachments[j].layout, attach_index, 2721 pCreateInfo->pAttachments[attach_index]); 2722 2723 bool used_as_depth = 2724 (subpass.pDepthStencilAttachment != NULL && subpass.pDepthStencilAttachment->attachment == attach_index); 2725 bool used_as_color = false; 2726 for (uint32_t k = 0; !used_as_depth && !used_as_color && k < subpass.colorAttachmentCount; ++k) { 2727 used_as_color = (subpass.pColorAttachments[k].attachment == attach_index); 2728 } 2729 if (!used_as_depth && !used_as_color && 2730 pCreateInfo->pAttachments[attach_index].loadOp == VK_ATTACHMENT_LOAD_OP_CLEAR) { 2731 skip |= log_msg( 2732 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 2733 VALIDATION_ERROR_1400069c, "DS", 2734 "CreateRenderPass: attachment %u is first used as an input attachment in subpass %u with loadOp=CLEAR. %s", 2735 attach_index, attach_index, validation_error_map[VALIDATION_ERROR_1400069c]); 2736 } 2737 } 2738 attach_first_use[attach_index] = false; 2739 } 2740 for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) { 2741 auto attach_index = subpass.pColorAttachments[j].attachment; 2742 if (attach_index == VK_ATTACHMENT_UNUSED) continue; 2743 2744 // TODO: Need a way to validate shared presentable images here, currently just allowing 2745 // VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR 2746 // as an acceptable layout, but need to make sure shared presentable images ONLY use that layout 2747 switch (subpass.pColorAttachments[j].layout) { 2748 case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL: 2749 // This is ideal. 2750 case VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR: 2751 // TODO: See note above, just assuming that attachment is shared presentable and allowing this for now. 
2752 break; 2753 2754 case VK_IMAGE_LAYOUT_GENERAL: 2755 // May not be optimal; TODO: reconsider this warning based on other constraints? 2756 skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, 2757 VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS", 2758 "Layout for color attachment is GENERAL but should be COLOR_ATTACHMENT_OPTIMAL."); 2759 break; 2760 2761 default: 2762 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, 2763 __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS", 2764 "Layout for color attachment is %s but can only be COLOR_ATTACHMENT_OPTIMAL or GENERAL.", 2765 string_VkImageLayout(subpass.pColorAttachments[j].layout)); 2766 } 2767 2768 if (attach_first_use[attach_index]) { 2769 skip |= ValidateLayoutVsAttachmentDescription(report_data, subpass.pColorAttachments[j].layout, attach_index, 2770 pCreateInfo->pAttachments[attach_index]); 2771 } 2772 attach_first_use[attach_index] = false; 2773 } 2774 if (subpass.pDepthStencilAttachment && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) { 2775 switch (subpass.pDepthStencilAttachment->layout) { 2776 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL: 2777 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL: 2778 // These are ideal. 2779 break; 2780 2781 case VK_IMAGE_LAYOUT_GENERAL: 2782 // May not be optimal; TODO: reconsider this warning based on other constraints? GENERAL can be better than 2783 // doing a bunch of transitions. 
2784 skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, 2785 VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS", 2786 "GENERAL layout for depth attachment may not give optimal performance."); 2787 break; 2788 2789 default: 2790 // No other layouts are acceptable 2791 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, 2792 __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS", 2793 "Layout for depth attachment is %s but can only be DEPTH_STENCIL_ATTACHMENT_OPTIMAL, " 2794 "DEPTH_STENCIL_READ_ONLY_OPTIMAL or GENERAL.", 2795 string_VkImageLayout(subpass.pDepthStencilAttachment->layout)); 2796 } 2797 2798 auto attach_index = subpass.pDepthStencilAttachment->attachment; 2799 if (attach_first_use[attach_index]) { 2800 skip |= ValidateLayoutVsAttachmentDescription(report_data, subpass.pDepthStencilAttachment->layout, attach_index, 2801 pCreateInfo->pAttachments[attach_index]); 2802 } 2803 attach_first_use[attach_index] = false; 2804 } 2805 } 2806 return skip; 2807} 2808 2809// For any image objects that overlap mapped memory, verify that their layouts are PREINIT or GENERAL 2810bool ValidateMapImageLayouts(core_validation::layer_data *device_data, VkDevice device, DEVICE_MEM_INFO const *mem_info, 2811 VkDeviceSize offset, VkDeviceSize end_offset) { 2812 const debug_report_data *report_data = core_validation::GetReportData(device_data); 2813 bool skip = false; 2814 // Iterate over all bound image ranges and verify that for any that overlap the map ranges, the layouts are 2815 // VK_IMAGE_LAYOUT_PREINITIALIZED or VK_IMAGE_LAYOUT_GENERAL 2816 // TODO : This can be optimized if we store ranges based on starting address and early exit when we pass our range 2817 for (auto image_handle : mem_info->bound_images) { 2818 auto img_it = mem_info->bound_ranges.find(image_handle); 2819 if (img_it != mem_info->bound_ranges.end()) { 2820 if (rangesIntersect(device_data, 
&img_it->second, offset, end_offset)) {
                // This image overlaps the mapped range: every layout tracked for it must be map-safe
                std::vector<VkImageLayout> layouts;
                if (FindLayouts(device_data, VkImage(image_handle), layouts)) {
                    for (auto layout : layouts) {
                        if (layout != VK_IMAGE_LAYOUT_PREINITIALIZED && layout != VK_IMAGE_LAYOUT_GENERAL) {
                            skip |=
                                log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
                                        HandleToUint64(mem_info->mem), __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
                                        "Mapping an image with layout %s can result in undefined behavior if this memory is "
                                        "used by the device. Only GENERAL or PREINITIALIZED should be used.",
                                        string_VkImageLayout(layout));
                        }
                    }
                }
            }
        }
    }
    return skip;
}

// Helper function to validate correct usage bits set for buffers or images. Verify that (actual & desired) flags != 0 or, if strict
// is true, verify that (actual & desired) flags == desired
static bool validate_usage_flags(layer_data *device_data, VkFlags actual, VkFlags desired, VkBool32 strict, uint64_t obj_handle,
                                 VulkanObjectType obj_type, int32_t const msgCode, char const *func_name, char const *usage_str) {
    const debug_report_data *report_data = core_validation::GetReportData(device_data);

    bool correct_usage = false;
    bool skip = false;
    const char *type_str = object_string[obj_type];
    if (strict) {
        correct_usage = ((actual & desired) == desired);
    } else {
        correct_usage = ((actual & desired) != 0);
    }
    if (!correct_usage) {
        if (msgCode == -1) {
            // TODO: Fix callers with msgCode == -1 to use correct validation checks.
            skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, get_debug_report_enum[obj_type], obj_handle, __LINE__,
                           MEMTRACK_INVALID_USAGE_FLAG, "MEM",
                           "Invalid usage flag for %s 0x%" PRIxLEAST64
                           " used by %s. In this case, %s should have %s set during creation.",
                           type_str, obj_handle, func_name, type_str, usage_str);
        } else {
            const char *valid_usage = (msgCode == -1) ? "" : validation_error_map[msgCode];
            skip = log_msg(
                report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, get_debug_report_enum[obj_type], obj_handle, __LINE__, msgCode, "MEM",
                "Invalid usage flag for %s 0x%" PRIxLEAST64 " used by %s. In this case, %s should have %s set during creation. %s",
                type_str, obj_handle, func_name, type_str, usage_str, valid_usage);
        }
    }
    return skip;
}

// Helper function to validate usage flags for images. For given image_state send actual vs. desired usage off to helper above
// where an error will be flagged if usage is not correct
bool ValidateImageUsageFlags(layer_data *device_data, IMAGE_STATE const *image_state, VkFlags desired, bool strict,
                             int32_t const msgCode, char const *func_name, char const *usage_string) {
    return validate_usage_flags(device_data, image_state->createInfo.usage, desired, strict, HandleToUint64(image_state->image),
                                kVulkanObjectTypeImage, msgCode, func_name, usage_string);
}

// Helper function to validate usage flags for buffers. For given buffer_state send actual vs. desired usage off to helper above
// where an error will be flagged if usage is not correct
bool ValidateBufferUsageFlags(layer_data *device_data, BUFFER_STATE const *buffer_state, VkFlags desired, bool strict,
                              int32_t const msgCode, char const *func_name, char const *usage_string) {
    return validate_usage_flags(device_data, buffer_state->createInfo.usage, desired, strict, HandleToUint64(buffer_state->buffer),
                                kVulkanObjectTypeBuffer, msgCode, func_name, usage_string);
}

// Validate-phase handler for vkCreateBuffer: check sparse create flags against the enabled device features.
bool PreCallValidateCreateBuffer(layer_data *device_data, const VkBufferCreateInfo *pCreateInfo) {
    bool skip = false;
    const debug_report_data *report_data = core_validation::GetReportData(device_data);

    // TODO: Add check for VALIDATION_ERROR_1ec0071e
    // TODO: Add check for VALIDATION_ERROR_01400728
    // TODO: Add check for VALIDATION_ERROR_0140072a
    // TODO: Add check for VALIDATION_ERROR_0140072c

    if ((pCreateInfo->flags & VK_BUFFER_CREATE_SPARSE_BINDING_BIT) && (!GetEnabledFeatures(device_data)->sparseBinding)) {
        skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                        VALIDATION_ERROR_01400726, "DS",
                        "vkCreateBuffer(): the sparseBinding device feature is disabled: Buffers cannot be created with the "
                        "VK_BUFFER_CREATE_SPARSE_BINDING_BIT set. %s",
                        validation_error_map[VALIDATION_ERROR_01400726]);
    }

    if ((pCreateInfo->flags & VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT) && (!GetEnabledFeatures(device_data)->sparseResidencyBuffer)) {
        skip |=
            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                    DRAWSTATE_INVALID_FEATURE, "DS",
                    "vkCreateBuffer(): the sparseResidencyBuffer device feature is disabled: Buffers cannot be created with the "
                    "VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT set.");
    }

    if ((pCreateInfo->flags & VK_BUFFER_CREATE_SPARSE_ALIASED_BIT) && (!GetEnabledFeatures(device_data)->sparseResidencyAliased)) {
        skip |=
            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                    DRAWSTATE_INVALID_FEATURE, "DS",
                    "vkCreateBuffer(): the sparseResidencyAliased device feature is disabled: Buffers cannot be created with the "
                    "VK_BUFFER_CREATE_SPARSE_ALIASED_BIT set.");
    }
    return skip;
}

// Record-phase handler for vkCreateBuffer: create and store state for the new buffer handle.
void PostCallRecordCreateBuffer(layer_data *device_data, const VkBufferCreateInfo *pCreateInfo, VkBuffer *pBuffer) {
    // TODO : This doesn't create deep copy of pQueueFamilyIndices so need to fix that if/when we want that data to be valid
    GetBufferMap(device_data)
        ->insert(std::make_pair(*pBuffer, std::unique_ptr<BUFFER_STATE>(new BUFFER_STATE(*pBuffer, pCreateInfo))));
}

bool PreCallValidateCreateBufferView(layer_data *device_data, const VkBufferViewCreateInfo *pCreateInfo) {
    bool skip = false;
    BUFFER_STATE *buffer_state = GetBufferState(device_data, pCreateInfo->buffer);
    // If this isn't a sparse buffer, it needs to have memory backing it at CreateBufferView time
    if (buffer_state) {
        skip |= ValidateMemoryIsBoundToBuffer(device_data, buffer_state, "vkCreateBufferView()", VALIDATION_ERROR_01a0074e);
        // In order to create a valid buffer view, the buffer must have been created with
at least one of the following flags: 2937 // UNIFORM_TEXEL_BUFFER_BIT or STORAGE_TEXEL_BUFFER_BIT 2938 skip |= ValidateBufferUsageFlags( 2939 device_data, buffer_state, VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, false, 2940 VALIDATION_ERROR_01a00748, "vkCreateBufferView()", "VK_BUFFER_USAGE_[STORAGE|UNIFORM]_TEXEL_BUFFER_BIT"); 2941 } 2942 return skip; 2943} 2944 2945void PostCallRecordCreateBufferView(layer_data *device_data, const VkBufferViewCreateInfo *pCreateInfo, VkBufferView *pView) { 2946 (*GetBufferViewMap(device_data))[*pView] = std::unique_ptr<BUFFER_VIEW_STATE>(new BUFFER_VIEW_STATE(*pView, pCreateInfo)); 2947} 2948 2949// For the given format verify that the aspect masks make sense 2950bool ValidateImageAspectMask(layer_data *device_data, VkImage image, VkFormat format, VkImageAspectFlags aspect_mask, 2951 const char *func_name) { 2952 const debug_report_data *report_data = core_validation::GetReportData(device_data); 2953 bool skip = false; 2954 if (FormatIsColor(format)) { 2955 if ((aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) != VK_IMAGE_ASPECT_COLOR_BIT) { 2956 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 2957 HandleToUint64(image), __LINE__, VALIDATION_ERROR_0a400c01, "IMAGE", 2958 "%s: Color image formats must have the VK_IMAGE_ASPECT_COLOR_BIT set. %s", func_name, 2959 validation_error_map[VALIDATION_ERROR_0a400c01]); 2960 } else if ((aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) != aspect_mask) { 2961 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 2962 HandleToUint64(image), __LINE__, VALIDATION_ERROR_0a400c01, "IMAGE", 2963 "%s: Color image formats must have ONLY the VK_IMAGE_ASPECT_COLOR_BIT set. 
%s", func_name, 2964 validation_error_map[VALIDATION_ERROR_0a400c01]); 2965 } 2966 } else if (FormatIsDepthAndStencil(format)) { 2967 if ((aspect_mask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) == 0) { 2968 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 2969 HandleToUint64(image), __LINE__, VALIDATION_ERROR_0a400c01, "IMAGE", 2970 "%s: Depth/stencil image formats must have " 2971 "at least one of VK_IMAGE_ASPECT_DEPTH_BIT " 2972 "and VK_IMAGE_ASPECT_STENCIL_BIT set. %s", 2973 func_name, validation_error_map[VALIDATION_ERROR_0a400c01]); 2974 } else if ((aspect_mask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) != aspect_mask) { 2975 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 2976 HandleToUint64(image), __LINE__, VALIDATION_ERROR_0a400c01, "IMAGE", 2977 "%s: Combination depth/stencil image formats can have only the VK_IMAGE_ASPECT_DEPTH_BIT and " 2978 "VK_IMAGE_ASPECT_STENCIL_BIT set. %s", 2979 func_name, validation_error_map[VALIDATION_ERROR_0a400c01]); 2980 } 2981 } else if (FormatIsDepthOnly(format)) { 2982 if ((aspect_mask & VK_IMAGE_ASPECT_DEPTH_BIT) != VK_IMAGE_ASPECT_DEPTH_BIT) { 2983 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 2984 HandleToUint64(image), __LINE__, VALIDATION_ERROR_0a400c01, "IMAGE", 2985 "%s: Depth-only image formats must have the VK_IMAGE_ASPECT_DEPTH_BIT set. %s", func_name, 2986 validation_error_map[VALIDATION_ERROR_0a400c01]); 2987 } else if ((aspect_mask & VK_IMAGE_ASPECT_DEPTH_BIT) != aspect_mask) { 2988 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 2989 HandleToUint64(image), __LINE__, VALIDATION_ERROR_0a400c01, "IMAGE", 2990 "%s: Depth-only image formats can have only the VK_IMAGE_ASPECT_DEPTH_BIT set. 
%s", func_name, 2991 validation_error_map[VALIDATION_ERROR_0a400c01]); 2992 } 2993 } else if (FormatIsStencilOnly(format)) { 2994 if ((aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT) != VK_IMAGE_ASPECT_STENCIL_BIT) { 2995 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 2996 HandleToUint64(image), __LINE__, VALIDATION_ERROR_0a400c01, "IMAGE", 2997 "%s: Stencil-only image formats must have the VK_IMAGE_ASPECT_STENCIL_BIT set. %s", func_name, 2998 validation_error_map[VALIDATION_ERROR_0a400c01]); 2999 } else if ((aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT) != aspect_mask) { 3000 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3001 HandleToUint64(image), __LINE__, VALIDATION_ERROR_0a400c01, "IMAGE", 3002 "%s: Stencil-only image formats can have only the VK_IMAGE_ASPECT_STENCIL_BIT set. %s", func_name, 3003 validation_error_map[VALIDATION_ERROR_0a400c01]); 3004 } 3005 } 3006 return skip; 3007} 3008 3009bool ValidateImageSubresourceRange(const layer_data *device_data, const IMAGE_STATE *image_state, const bool is_imageview_2d_array, 3010 const VkImageSubresourceRange &subresourceRange, const char *cmd_name, const char *param_name) { 3011 const debug_report_data *report_data = core_validation::GetReportData(device_data); 3012 bool skip = false; 3013 3014 // Validate mip levels 3015 const auto image_mip_count = image_state->createInfo.mipLevels; 3016 3017 if (subresourceRange.levelCount == 0) { 3018 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3019 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_0a8007fc, "IMAGE", 3020 "%s: %s.levelCount is 0. %s", cmd_name, param_name, validation_error_map[VALIDATION_ERROR_0a8007fc]); 3021 } else if (subresourceRange.levelCount == VK_REMAINING_MIP_LEVELS) { 3022 // TODO: Not in the spec VUs. 
Probably missing -- KhronosGroup/Vulkan-Docs#416 3023 if (subresourceRange.baseMipLevel >= image_mip_count) { 3024 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3025 HandleToUint64(image_state->image), __LINE__, DRAWSTATE_INVALID_IMAGE_SUBRANGE, "IMAGE", 3026 "%s: %s.baseMipLevel (= %" PRIu32 ") is greater or equal to the mip level count of the image (i.e. " 3027 "greater or equal to %" PRIu32 ").", 3028 cmd_name, param_name, subresourceRange.baseMipLevel, image_mip_count); 3029 } 3030 } else { 3031 const uint64_t necessary_mip_count = uint64_t{subresourceRange.baseMipLevel} + uint64_t{subresourceRange.levelCount}; 3032 3033 if (necessary_mip_count > image_mip_count) { 3034 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3035 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_0a8007fc, "IMAGE", 3036 "%s: %s.baseMipLevel + .levelCount (= %" PRIu32 " + %" PRIu32 " = %" PRIu64 3037 ") is greater than the " 3038 "mip level count of the image (i.e. greater than %" PRIu32 "). %s", 3039 cmd_name, param_name, subresourceRange.baseMipLevel, subresourceRange.levelCount, necessary_mip_count, 3040 image_mip_count, validation_error_map[VALIDATION_ERROR_0a8007fc]); 3041 } 3042 } 3043 3044 // Validate array layers 3045 bool is_khr_maintenance1 = GetDeviceExtensions(device_data)->vk_khr_maintenance1; 3046 bool is_3D_to_2D_map = is_khr_maintenance1 && image_state->createInfo.imageType == VK_IMAGE_TYPE_3D && is_imageview_2d_array; 3047 3048 const auto image_layer_count = is_3D_to_2D_map ? image_state->createInfo.extent.depth : image_state->createInfo.arrayLayers; 3049 const auto image_layer_count_var_name = is_3D_to_2D_map ? "extent.depth" : "arrayLayers"; 3050 3051 const auto invalid_layer_code = 3052 is_khr_maintenance1 ? (is_3D_to_2D_map ? 
VALIDATION_ERROR_0a800800 : VALIDATION_ERROR_0a800802) : VALIDATION_ERROR_0a8007fe; 3053 3054 if (subresourceRange.layerCount == 0) { 3055 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3056 HandleToUint64(image_state->image), __LINE__, invalid_layer_code, "IMAGE", "%s: %s.layerCount is 0. %s", 3057 cmd_name, param_name, validation_error_map[invalid_layer_code]); 3058 } else if (subresourceRange.layerCount == VK_REMAINING_ARRAY_LAYERS) { 3059 // TODO: Not in the spec VUs. Probably missing -- KhronosGroup/Vulkan-Docs#416 3060 if (subresourceRange.baseArrayLayer >= image_layer_count) { 3061 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3062 HandleToUint64(image_state->image), __LINE__, DRAWSTATE_INVALID_IMAGE_SUBRANGE, "IMAGE", 3063 "%s: %s.baseArrayLayer (= %" PRIu32 ") is greater or equal to the %s of the image when it was created " 3064 "(i.e. greater or equal to %" PRIu32 ").", 3065 cmd_name, param_name, subresourceRange.baseArrayLayer, image_layer_count_var_name, image_layer_count); 3066 } 3067 } else { 3068 const uint64_t necessary_layer_count = uint64_t{subresourceRange.baseArrayLayer} + uint64_t{subresourceRange.layerCount}; 3069 3070 if (necessary_layer_count > image_layer_count) { 3071 skip |= 3072 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3073 HandleToUint64(image_state->image), __LINE__, invalid_layer_code, "IMAGE", 3074 "%s: %s.baseArrayLayer + .layerCount (= %" PRIu32 " + %" PRIu32 " = %" PRIu64 ") is greater than the " 3075 "%s of the image when it was created (i.e. greater than %" PRIu32 "). 
%s", 3076 cmd_name, param_name, subresourceRange.baseArrayLayer, subresourceRange.layerCount, necessary_layer_count, 3077 image_layer_count_var_name, image_layer_count, validation_error_map[invalid_layer_code]); 3078 } 3079 } 3080 3081 return skip; 3082} 3083 3084bool PreCallValidateCreateImageView(layer_data *device_data, const VkImageViewCreateInfo *create_info) { 3085 const debug_report_data *report_data = core_validation::GetReportData(device_data); 3086 bool skip = false; 3087 IMAGE_STATE *image_state = GetImageState(device_data, create_info->image); 3088 if (image_state) { 3089 skip |= ValidateImageUsageFlags( 3090 device_data, image_state, 3091 VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT | 3092 VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, 3093 false, -1, "vkCreateImageView()", 3094 "VK_IMAGE_USAGE_[SAMPLED|STORAGE|COLOR_ATTACHMENT|DEPTH_STENCIL_ATTACHMENT|INPUT_ATTACHMENT]_BIT"); 3095 // If this isn't a sparse image, it needs to have memory backing it at CreateImageView time 3096 skip |= ValidateMemoryIsBoundToImage(device_data, image_state, "vkCreateImageView()", VALIDATION_ERROR_0ac007f8); 3097 // Checks imported from image layer 3098 skip |= ValidateImageSubresourceRange(device_data, image_state, create_info->viewType == VK_IMAGE_VIEW_TYPE_2D_ARRAY, 3099 create_info->subresourceRange, "vkCreateImageView", "pCreateInfo->subresourceRange"); 3100 3101 VkImageCreateFlags image_flags = image_state->createInfo.flags; 3102 VkFormat image_format = image_state->createInfo.format; 3103 VkFormat view_format = create_info->format; 3104 VkImageAspectFlags aspect_mask = create_info->subresourceRange.aspectMask; 3105 3106 // Validate VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT state 3107 if (image_flags & VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT) { 3108 // Format MUST be compatible (in the same format compatibility class) as the format the image was created with 3109 if 
(FormatCompatibilityClass(image_format) != FormatCompatibilityClass(view_format)) { 3110 std::stringstream ss; 3111 ss << "vkCreateImageView(): ImageView format " << string_VkFormat(view_format) 3112 << " is not in the same format compatibility class as image (" << HandleToUint64(create_info->image) 3113 << ") format " << string_VkFormat(image_format) 3114 << ". Images created with the VK_IMAGE_CREATE_MUTABLE_FORMAT BIT " 3115 << "can support ImageViews with differing formats but they must be in the same compatibility class."; 3116 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 3117 VALIDATION_ERROR_0ac007f4, "IMAGE", "%s %s", ss.str().c_str(), 3118 validation_error_map[VALIDATION_ERROR_0ac007f4]); 3119 } 3120 } else { 3121 // Format MUST be IDENTICAL to the format the image was created with 3122 if (image_format != view_format) { 3123 std::stringstream ss; 3124 ss << "vkCreateImageView() format " << string_VkFormat(view_format) << " differs from image " 3125 << HandleToUint64(create_info->image) << " format " << string_VkFormat(image_format) 3126 << ". 
Formats MUST be IDENTICAL unless VK_IMAGE_CREATE_MUTABLE_FORMAT BIT was set on image creation."; 3127 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 3128 VALIDATION_ERROR_0ac007f6, "IMAGE", "%s %s", ss.str().c_str(), 3129 validation_error_map[VALIDATION_ERROR_0ac007f6]); 3130 } 3131 } 3132 3133 // Validate correct image aspect bits for desired formats and format consistency 3134 skip |= ValidateImageAspectMask(device_data, image_state->image, image_format, aspect_mask, "vkCreateImageView()"); 3135 } 3136 return skip; 3137} 3138 3139void PostCallRecordCreateImageView(layer_data *device_data, const VkImageViewCreateInfo *create_info, VkImageView view) { 3140 auto image_view_map = GetImageViewMap(device_data); 3141 (*image_view_map)[view] = std::unique_ptr<IMAGE_VIEW_STATE>(new IMAGE_VIEW_STATE(view, create_info)); 3142 3143 auto image_state = GetImageState(device_data, create_info->image); 3144 auto &sub_res_range = (*image_view_map)[view].get()->create_info.subresourceRange; 3145 sub_res_range.levelCount = ResolveRemainingLevels(&sub_res_range, image_state->createInfo.mipLevels); 3146 sub_res_range.layerCount = ResolveRemainingLayers(&sub_res_range, image_state->createInfo.arrayLayers); 3147} 3148 3149bool PreCallValidateCmdCopyBuffer(layer_data *device_data, GLOBAL_CB_NODE *cb_node, BUFFER_STATE *src_buffer_state, 3150 BUFFER_STATE *dst_buffer_state) { 3151 bool skip = false; 3152 skip |= ValidateMemoryIsBoundToBuffer(device_data, src_buffer_state, "vkCmdCopyBuffer()", VALIDATION_ERROR_18c000ee); 3153 skip |= ValidateMemoryIsBoundToBuffer(device_data, dst_buffer_state, "vkCmdCopyBuffer()", VALIDATION_ERROR_18c000f2); 3154 // Validate that SRC & DST buffers have correct usage flags set 3155 skip |= ValidateBufferUsageFlags(device_data, src_buffer_state, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, true, 3156 VALIDATION_ERROR_18c000ec, "vkCmdCopyBuffer()", "VK_BUFFER_USAGE_TRANSFER_SRC_BIT"); 3157 skip |= 
ValidateBufferUsageFlags(device_data, dst_buffer_state, VK_BUFFER_USAGE_TRANSFER_DST_BIT, true, 3158 VALIDATION_ERROR_18c000f0, "vkCmdCopyBuffer()", "VK_BUFFER_USAGE_TRANSFER_DST_BIT"); 3159 skip |= ValidateCmdQueueFlags(device_data, cb_node, "vkCmdCopyBuffer()", 3160 VK_QUEUE_TRANSFER_BIT | VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT, VALIDATION_ERROR_18c02415); 3161 skip |= ValidateCmd(device_data, cb_node, CMD_COPYBUFFER, "vkCmdCopyBuffer()"); 3162 skip |= insideRenderPass(device_data, cb_node, "vkCmdCopyBuffer()", VALIDATION_ERROR_18c00017); 3163 return skip; 3164} 3165 3166void PreCallRecordCmdCopyBuffer(layer_data *device_data, GLOBAL_CB_NODE *cb_node, BUFFER_STATE *src_buffer_state, 3167 BUFFER_STATE *dst_buffer_state) { 3168 // Update bindings between buffers and cmd buffer 3169 AddCommandBufferBindingBuffer(device_data, cb_node, src_buffer_state); 3170 AddCommandBufferBindingBuffer(device_data, cb_node, dst_buffer_state); 3171 3172 std::function<bool()> function = [=]() { 3173 return ValidateBufferMemoryIsValid(device_data, src_buffer_state, "vkCmdCopyBuffer()"); 3174 }; 3175 cb_node->validate_functions.push_back(function); 3176 function = [=]() { 3177 SetBufferMemoryValid(device_data, dst_buffer_state, true); 3178 return false; 3179 }; 3180 cb_node->validate_functions.push_back(function); 3181 core_validation::UpdateCmdBufferLastCmd(cb_node, CMD_COPYBUFFER); 3182} 3183 3184static bool validateIdleBuffer(layer_data *device_data, VkBuffer buffer) { 3185 const debug_report_data *report_data = core_validation::GetReportData(device_data); 3186 bool skip = false; 3187 auto buffer_state = GetBufferState(device_data, buffer); 3188 if (!buffer_state) { 3189 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, HandleToUint64(buffer), 3190 __LINE__, DRAWSTATE_DOUBLE_DESTROY, "DS", 3191 "Cannot free buffer 0x%" PRIxLEAST64 " that has not been allocated.", HandleToUint64(buffer)); 3192 } else { 3193 if 
(buffer_state->in_use.load()) { 3194 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, 3195 HandleToUint64(buffer), __LINE__, VALIDATION_ERROR_23c00734, "DS", 3196 "Cannot free buffer 0x%" PRIxLEAST64 " that is in use by a command buffer. %s", HandleToUint64(buffer), 3197 validation_error_map[VALIDATION_ERROR_23c00734]); 3198 } 3199 } 3200 return skip; 3201} 3202 3203bool PreCallValidateDestroyImageView(layer_data *device_data, VkImageView image_view, IMAGE_VIEW_STATE **image_view_state, 3204 VK_OBJECT *obj_struct) { 3205 *image_view_state = GetImageViewState(device_data, image_view); 3206 *obj_struct = {HandleToUint64(image_view), kVulkanObjectTypeImageView}; 3207 if (GetDisables(device_data)->destroy_image_view) return false; 3208 bool skip = false; 3209 if (*image_view_state) { 3210 skip |= ValidateObjectNotInUse(device_data, *image_view_state, *obj_struct, VALIDATION_ERROR_25400804); 3211 } 3212 return skip; 3213} 3214 3215void PostCallRecordDestroyImageView(layer_data *device_data, VkImageView image_view, IMAGE_VIEW_STATE *image_view_state, 3216 VK_OBJECT obj_struct) { 3217 // Any bound cmd buffers are now invalid 3218 invalidateCommandBuffers(device_data, image_view_state->cb_bindings, obj_struct); 3219 (*GetImageViewMap(device_data)).erase(image_view); 3220} 3221 3222bool PreCallValidateDestroyBuffer(layer_data *device_data, VkBuffer buffer, BUFFER_STATE **buffer_state, VK_OBJECT *obj_struct) { 3223 *buffer_state = GetBufferState(device_data, buffer); 3224 *obj_struct = {HandleToUint64(buffer), kVulkanObjectTypeBuffer}; 3225 if (GetDisables(device_data)->destroy_buffer) return false; 3226 bool skip = false; 3227 if (*buffer_state) { 3228 skip |= validateIdleBuffer(device_data, buffer); 3229 } 3230 return skip; 3231} 3232 3233void PostCallRecordDestroyBuffer(layer_data *device_data, VkBuffer buffer, BUFFER_STATE *buffer_state, VK_OBJECT obj_struct) { 3234 invalidateCommandBuffers(device_data, 
buffer_state->cb_bindings, obj_struct); 3235 for (auto mem_binding : buffer_state->GetBoundMemory()) { 3236 auto mem_info = GetMemObjInfo(device_data, mem_binding); 3237 if (mem_info) { 3238 core_validation::RemoveBufferMemoryRange(HandleToUint64(buffer), mem_info); 3239 } 3240 } 3241 ClearMemoryObjectBindings(device_data, HandleToUint64(buffer), kVulkanObjectTypeBuffer); 3242 GetBufferMap(device_data)->erase(buffer_state->buffer); 3243} 3244 3245bool PreCallValidateDestroyBufferView(layer_data *device_data, VkBufferView buffer_view, BUFFER_VIEW_STATE **buffer_view_state, 3246 VK_OBJECT *obj_struct) { 3247 *buffer_view_state = GetBufferViewState(device_data, buffer_view); 3248 *obj_struct = {HandleToUint64(buffer_view), kVulkanObjectTypeBufferView}; 3249 if (GetDisables(device_data)->destroy_buffer_view) return false; 3250 bool skip = false; 3251 if (*buffer_view_state) { 3252 skip |= ValidateObjectNotInUse(device_data, *buffer_view_state, *obj_struct, VALIDATION_ERROR_23e00750); 3253 } 3254 return skip; 3255} 3256 3257void PostCallRecordDestroyBufferView(layer_data *device_data, VkBufferView buffer_view, BUFFER_VIEW_STATE *buffer_view_state, 3258 VK_OBJECT obj_struct) { 3259 // Any bound cmd buffers are now invalid 3260 invalidateCommandBuffers(device_data, buffer_view_state->cb_bindings, obj_struct); 3261 GetBufferViewMap(device_data)->erase(buffer_view); 3262} 3263 3264bool PreCallValidateCmdFillBuffer(layer_data *device_data, GLOBAL_CB_NODE *cb_node, BUFFER_STATE *buffer_state) { 3265 bool skip = false; 3266 skip |= ValidateMemoryIsBoundToBuffer(device_data, buffer_state, "vkCmdFillBuffer()", VALIDATION_ERROR_1b40003e); 3267 skip |= ValidateCmdQueueFlags(device_data, cb_node, "vkCmdFillBuffer()", 3268 VK_QUEUE_TRANSFER_BIT | VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT, VALIDATION_ERROR_1b402415); 3269 skip |= ValidateCmd(device_data, cb_node, CMD_FILLBUFFER, "vkCmdFillBuffer()"); 3270 // Validate that DST buffer has correct usage flags set 3271 skip |= 
ValidateBufferUsageFlags(device_data, buffer_state, VK_BUFFER_USAGE_TRANSFER_DST_BIT, true, VALIDATION_ERROR_1b40003a, 3272 "vkCmdFillBuffer()", "VK_BUFFER_USAGE_TRANSFER_DST_BIT"); 3273 skip |= insideRenderPass(device_data, cb_node, "vkCmdFillBuffer()", VALIDATION_ERROR_1b400017); 3274 return skip; 3275} 3276 3277void PreCallRecordCmdFillBuffer(layer_data *device_data, GLOBAL_CB_NODE *cb_node, BUFFER_STATE *buffer_state) { 3278 std::function<bool()> function = [=]() { 3279 SetBufferMemoryValid(device_data, buffer_state, true); 3280 return false; 3281 }; 3282 cb_node->validate_functions.push_back(function); 3283 // Update bindings between buffer and cmd buffer 3284 AddCommandBufferBindingBuffer(device_data, cb_node, buffer_state); 3285 core_validation::UpdateCmdBufferLastCmd(cb_node, CMD_FILLBUFFER); 3286} 3287 3288bool ValidateBufferImageCopyData(const debug_report_data *report_data, uint32_t regionCount, const VkBufferImageCopy *pRegions, 3289 IMAGE_STATE *image_state, const char *function) { 3290 bool skip = false; 3291 3292 for (uint32_t i = 0; i < regionCount; i++) { 3293 if (image_state->createInfo.imageType == VK_IMAGE_TYPE_1D) { 3294 if ((pRegions[i].imageOffset.y != 0) || (pRegions[i].imageExtent.height != 1)) { 3295 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3296 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_0160018e, "IMAGE", 3297 "%s(): pRegion[%d] imageOffset.y is %d and imageExtent.height is %d. For 1D images these " 3298 "must be 0 and 1, respectively. 
%s", 3299 function, i, pRegions[i].imageOffset.y, pRegions[i].imageExtent.height, 3300 validation_error_map[VALIDATION_ERROR_0160018e]); 3301 } 3302 } 3303 3304 if ((image_state->createInfo.imageType == VK_IMAGE_TYPE_1D) || (image_state->createInfo.imageType == VK_IMAGE_TYPE_2D)) { 3305 if ((pRegions[i].imageOffset.z != 0) || (pRegions[i].imageExtent.depth != 1)) { 3306 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3307 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_01600192, "IMAGE", 3308 "%s(): pRegion[%d] imageOffset.z is %d and imageExtent.depth is %d. For 1D and 2D images these " 3309 "must be 0 and 1, respectively. %s", 3310 function, i, pRegions[i].imageOffset.z, pRegions[i].imageExtent.depth, 3311 validation_error_map[VALIDATION_ERROR_01600192]); 3312 } 3313 } 3314 3315 if (image_state->createInfo.imageType == VK_IMAGE_TYPE_3D) { 3316 if ((0 != pRegions[i].imageSubresource.baseArrayLayer) || (1 != pRegions[i].imageSubresource.layerCount)) { 3317 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3318 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_016001aa, "IMAGE", 3319 "%s(): pRegion[%d] imageSubresource.baseArrayLayer is %d and imageSubresource.layerCount is " 3320 "%d. For 3D images these must be 0 and 1, respectively. 
%s", 3321 function, i, pRegions[i].imageSubresource.baseArrayLayer, pRegions[i].imageSubresource.layerCount, 3322 validation_error_map[VALIDATION_ERROR_016001aa]); 3323 } 3324 } 3325 3326 // If the the calling command's VkImage parameter's format is not a depth/stencil format, 3327 // then bufferOffset must be a multiple of the calling command's VkImage parameter's texel size 3328 auto texel_size = FormatSize(image_state->createInfo.format); 3329 if (!FormatIsDepthAndStencil(image_state->createInfo.format) && SafeModulo(pRegions[i].bufferOffset, texel_size) != 0) { 3330 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3331 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_01600182, "IMAGE", 3332 "%s(): pRegion[%d] bufferOffset 0x%" PRIxLEAST64 3333 " must be a multiple of this format's texel size (" PRINTF_SIZE_T_SPECIFIER "). %s", 3334 function, i, pRegions[i].bufferOffset, texel_size, validation_error_map[VALIDATION_ERROR_01600182]); 3335 } 3336 3337 // BufferOffset must be a multiple of 4 3338 if (SafeModulo(pRegions[i].bufferOffset, 4) != 0) { 3339 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3340 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_01600184, "IMAGE", 3341 "%s(): pRegion[%d] bufferOffset 0x%" PRIxLEAST64 " must be a multiple of 4. 
%s", function, i, 3342 pRegions[i].bufferOffset, validation_error_map[VALIDATION_ERROR_01600184]); 3343 } 3344 3345 // BufferRowLength must be 0, or greater than or equal to the width member of imageExtent 3346 if ((pRegions[i].bufferRowLength != 0) && (pRegions[i].bufferRowLength < pRegions[i].imageExtent.width)) { 3347 skip |= log_msg( 3348 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3349 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_01600186, "IMAGE", 3350 "%s(): pRegion[%d] bufferRowLength (%d) must be zero or greater-than-or-equal-to imageExtent.width (%d). %s", 3351 function, i, pRegions[i].bufferRowLength, pRegions[i].imageExtent.width, 3352 validation_error_map[VALIDATION_ERROR_01600186]); 3353 } 3354 3355 // BufferImageHeight must be 0, or greater than or equal to the height member of imageExtent 3356 if ((pRegions[i].bufferImageHeight != 0) && (pRegions[i].bufferImageHeight < pRegions[i].imageExtent.height)) { 3357 skip |= log_msg( 3358 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3359 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_01600188, "IMAGE", 3360 "%s(): pRegion[%d] bufferImageHeight (%d) must be zero or greater-than-or-equal-to imageExtent.height (%d). %s", 3361 function, i, pRegions[i].bufferImageHeight, pRegions[i].imageExtent.height, 3362 validation_error_map[VALIDATION_ERROR_01600188]); 3363 } 3364 3365 // subresource aspectMask must have exactly 1 bit set 3366 const int num_bits = sizeof(VkFlags) * CHAR_BIT; 3367 std::bitset<num_bits> aspect_mask_bits(pRegions[i].imageSubresource.aspectMask); 3368 if (aspect_mask_bits.count() != 1) { 3369 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3370 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_016001a8, "IMAGE", 3371 "%s: aspectMasks for imageSubresource in each region must have only a single bit set. 
%s", function, 3372 validation_error_map[VALIDATION_ERROR_016001a8]); 3373 } 3374 3375 // image subresource aspect bit must match format 3376 if (!VerifyAspectsPresent(pRegions[i].imageSubresource.aspectMask, image_state->createInfo.format)) { 3377 skip |= log_msg( 3378 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3379 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_016001a6, "IMAGE", 3380 "%s(): pRegion[%d] subresource aspectMask 0x%x specifies aspects that are not present in image format 0x%x. %s", 3381 function, i, pRegions[i].imageSubresource.aspectMask, image_state->createInfo.format, 3382 validation_error_map[VALIDATION_ERROR_016001a6]); 3383 } 3384 3385 // Checks that apply only to compressed images 3386 // TODO: there is a comment in ValidateCopyBufferImageTransferGranularityRequirements() in core_validation.cpp that 3387 // reserves a place for these compressed image checks. This block of code could move there once the image 3388 // stuff is moved into core validation. 3389 if (FormatIsCompressed(image_state->createInfo.format)) { 3390 auto block_size = FormatCompressedTexelBlockExtent(image_state->createInfo.format); 3391 3392 // BufferRowLength must be a multiple of block width 3393 if (SafeModulo(pRegions[i].bufferRowLength, block_size.width) != 0) { 3394 skip |= log_msg( 3395 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3396 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_01600196, "IMAGE", 3397 "%s(): pRegion[%d] bufferRowLength (%d) must be a multiple of the compressed image's texel width (%d). 
%s.", 3398 function, i, pRegions[i].bufferRowLength, block_size.width, validation_error_map[VALIDATION_ERROR_01600196]); 3399 } 3400 3401 // BufferRowHeight must be a multiple of block height 3402 if (SafeModulo(pRegions[i].bufferImageHeight, block_size.height) != 0) { 3403 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3404 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_01600198, "IMAGE", 3405 "%s(): pRegion[%d] bufferImageHeight (%d) must be a multiple of the compressed image's texel " 3406 "height (%d). %s.", 3407 function, i, pRegions[i].bufferImageHeight, block_size.height, 3408 validation_error_map[VALIDATION_ERROR_01600198]); 3409 } 3410 3411 // image offsets must be multiples of block dimensions 3412 if ((SafeModulo(pRegions[i].imageOffset.x, block_size.width) != 0) || 3413 (SafeModulo(pRegions[i].imageOffset.y, block_size.height) != 0) || 3414 (SafeModulo(pRegions[i].imageOffset.z, block_size.depth) != 0)) { 3415 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3416 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_0160019a, "IMAGE", 3417 "%s(): pRegion[%d] imageOffset(x,y) (%d, %d) must be multiples of the compressed image's texel " 3418 "width & height (%d, %d). 
%s.", 3419 function, i, pRegions[i].imageOffset.x, pRegions[i].imageOffset.y, block_size.width, 3420 block_size.height, validation_error_map[VALIDATION_ERROR_0160019a]); 3421 } 3422 3423 // bufferOffset must be a multiple of block size (linear bytes) 3424 size_t block_size_in_bytes = FormatSize(image_state->createInfo.format); 3425 if (SafeModulo(pRegions[i].bufferOffset, block_size_in_bytes) != 0) { 3426 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3427 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_0160019c, "IMAGE", 3428 "%s(): pRegion[%d] bufferOffset (0x%" PRIxLEAST64 3429 ") must be a multiple of the compressed image's texel block " 3430 "size (" PRINTF_SIZE_T_SPECIFIER "). %s.", 3431 function, i, pRegions[i].bufferOffset, block_size_in_bytes, 3432 validation_error_map[VALIDATION_ERROR_0160019c]); 3433 } 3434 3435 // imageExtent width must be a multiple of block width, or extent+offset width must equal subresource width 3436 VkExtent3D mip_extent = GetImageSubresourceExtent(image_state, &(pRegions[i].imageSubresource)); 3437 if ((SafeModulo(pRegions[i].imageExtent.width, block_size.width) != 0) && 3438 (pRegions[i].imageExtent.width + pRegions[i].imageOffset.x != mip_extent.width)) { 3439 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3440 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_0160019e, "IMAGE", 3441 "%s(): pRegion[%d] extent width (%d) must be a multiple of the compressed texture block width " 3442 "(%d), or when added to offset.x (%d) must equal the image subresource width (%d). 
%s.", 3443 function, i, pRegions[i].imageExtent.width, block_size.width, pRegions[i].imageOffset.x, 3444 mip_extent.width, validation_error_map[VALIDATION_ERROR_0160019e]); 3445 } 3446 3447 // imageExtent height must be a multiple of block height, or extent+offset height must equal subresource height 3448 if ((SafeModulo(pRegions[i].imageExtent.height, block_size.height) != 0) && 3449 (pRegions[i].imageExtent.height + pRegions[i].imageOffset.y != mip_extent.height)) { 3450 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3451 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_016001a0, "IMAGE", 3452 "%s(): pRegion[%d] extent height (%d) must be a multiple of the compressed texture block height " 3453 "(%d), or when added to offset.y (%d) must equal the image subresource height (%d). %s.", 3454 function, i, pRegions[i].imageExtent.height, block_size.height, pRegions[i].imageOffset.y, 3455 mip_extent.height, validation_error_map[VALIDATION_ERROR_016001a0]); 3456 } 3457 3458 // imageExtent depth must be a multiple of block depth, or extent+offset depth must equal subresource depth 3459 if ((SafeModulo(pRegions[i].imageExtent.depth, block_size.depth) != 0) && 3460 (pRegions[i].imageExtent.depth + pRegions[i].imageOffset.z != mip_extent.depth)) { 3461 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3462 HandleToUint64(image_state->image), __LINE__, VALIDATION_ERROR_016001a2, "IMAGE", 3463 "%s(): pRegion[%d] extent width (%d) must be a multiple of the compressed texture block depth " 3464 "(%d), or when added to offset.z (%d) must equal the image subresource depth (%d). 
%s.", 3465 function, i, pRegions[i].imageExtent.depth, block_size.depth, pRegions[i].imageOffset.z, 3466 mip_extent.depth, validation_error_map[VALIDATION_ERROR_016001a2]); 3467 } 3468 } 3469 } 3470 3471 return skip; 3472} 3473 3474static bool ValidateImageBounds(const debug_report_data *report_data, const IMAGE_STATE *image_state, const uint32_t regionCount, 3475 const VkBufferImageCopy *pRegions, const char *func_name, UNIQUE_VALIDATION_ERROR_CODE msg_code) { 3476 bool skip = false; 3477 const VkImageCreateInfo *image_info = &(image_state->createInfo); 3478 3479 for (uint32_t i = 0; i < regionCount; i++) { 3480 VkExtent3D extent = pRegions[i].imageExtent; 3481 VkOffset3D offset = pRegions[i].imageOffset; 3482 3483 if (IsExtentSizeZero(&extent)) // Warn on zero area subresource 3484 { 3485 skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 3486 (uint64_t)0, __LINE__, IMAGE_ZERO_AREA_SUBREGION, "IMAGE", 3487 "%s: pRegion[%d] imageExtent of {%1d, %1d, %1d} has zero area", func_name, i, extent.width, 3488 extent.height, extent.depth); 3489 } 3490 3491 VkExtent3D image_extent = GetImageSubresourceExtent(image_state, &(pRegions[i].imageSubresource)); 3492 3493 // If we're using a compressed format, valid extent is rounded up to multiple of block size (per 18.1) 3494 if (FormatIsCompressed(image_info->format)) { 3495 auto block_extent = FormatCompressedTexelBlockExtent(image_info->format); 3496 if (image_extent.width % block_extent.width) { 3497 image_extent.width += (block_extent.width - (image_extent.width % block_extent.width)); 3498 } 3499 if (image_extent.height % block_extent.height) { 3500 image_extent.height += (block_extent.height - (image_extent.height % block_extent.height)); 3501 } 3502 if (image_extent.depth % block_extent.depth) { 3503 image_extent.depth += (block_extent.depth - (image_extent.depth % block_extent.depth)); 3504 } 3505 } 3506 3507 if (0 != ExceedsBounds(&offset, &extent, 
// Verify that every copy region described by pRegions fits inside buff_state's allocation.
// Computes the offset of the last byte each region touches and compares it to the buffer size.
// NOTE(review): assumes pRegions has at least regionCount entries -- caller is expected to guarantee this.
static inline bool ValidateBufferBounds(const debug_report_data *report_data, IMAGE_STATE *image_state, BUFFER_STATE *buff_state,
                                        uint32_t regionCount, const VkBufferImageCopy *pRegions, const char *func_name,
                                        UNIQUE_VALIDATION_ERROR_CODE msg_code) {
    bool skip = false;

    VkDeviceSize buffer_size = buff_state->createInfo.size;

    for (uint32_t i = 0; i < regionCount; i++) {
        VkExtent3D copy_extent = pRegions[i].imageExtent;

        // bufferRowLength/bufferImageHeight of 0 mean "tightly packed" -- fall back to the copy extent.
        VkDeviceSize buffer_width = (0 == pRegions[i].bufferRowLength ? copy_extent.width : pRegions[i].bufferRowLength);
        VkDeviceSize buffer_height = (0 == pRegions[i].bufferImageHeight ? copy_extent.height : pRegions[i].bufferImageHeight);
        VkDeviceSize unit_size = FormatSize(image_state->createInfo.format);  // size (bytes) of texel or block

        // Handle special buffer packing rules for specific depth/stencil formats
        if (pRegions[i].imageSubresource.aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) {
            // Stencil-only copies always move one byte per texel, regardless of the combined format.
            unit_size = FormatSize(VK_FORMAT_S8_UINT);
        } else if (pRegions[i].imageSubresource.aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) {
            // Depth-only copies use the size of the depth component alone.
            switch (image_state->createInfo.format) {
                case VK_FORMAT_D16_UNORM_S8_UINT:
                    unit_size = FormatSize(VK_FORMAT_D16_UNORM);
                    break;
                case VK_FORMAT_D32_SFLOAT_S8_UINT:
                    unit_size = FormatSize(VK_FORMAT_D32_SFLOAT);
                    break;
                case VK_FORMAT_X8_D24_UNORM_PACK32:  // Fall through
                case VK_FORMAT_D24_UNORM_S8_UINT:
                    // 24-bit depth data occupies 4 bytes per texel in the buffer.
                    unit_size = 4;
                    break;
                default:
                    break;
            }
        }

        if (FormatIsCompressed(image_state->createInfo.format)) {
            // Switch to texel block units, rounding up for any partially-used blocks
            auto block_dim = FormatCompressedTexelBlockExtent(image_state->createInfo.format);
            buffer_width = (buffer_width + block_dim.width - 1) / block_dim.width;
            buffer_height = (buffer_height + block_dim.height - 1) / block_dim.height;

            copy_extent.width = (copy_extent.width + block_dim.width - 1) / block_dim.width;
            copy_extent.height = (copy_extent.height + block_dim.height - 1) / block_dim.height;
            copy_extent.depth = (copy_extent.depth + block_dim.depth - 1) / block_dim.depth;
        }

        // Either depth or layerCount may be greater than 1 (not both). This is the number of 'slices' to copy
        uint32_t z_copies = std::max(copy_extent.depth, pRegions[i].imageSubresource.layerCount);
        if (IsExtentSizeZero(&copy_extent) || (0 == z_copies)) {
            // TODO: Issue warning here? Already warned in ValidateImageBounds()...
        } else {
            // Calculate buffer offset of final copied byte, + 1.
            VkDeviceSize max_buffer_offset = (z_copies - 1) * buffer_height * buffer_width;    // offset to slice
            max_buffer_offset += ((copy_extent.height - 1) * buffer_width) + copy_extent.width;  // add row,col
            max_buffer_offset *= unit_size;                                                      // convert to bytes
            max_buffer_offset += pRegions[i].bufferOffset;                                       // add initial offset (bytes)

            if (buffer_size < max_buffer_offset) {
                skip |=
                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)0,
                            __LINE__, msg_code, "IMAGE", "%s: pRegion[%d] exceeds buffer size of %" PRIu64 " bytes. %s.", func_name,
                            i, buffer_size, validation_error_map[msg_code]);
            }
        }
    }

    return skip;
}
%s.", func_name, 3577 i, buffer_size, validation_error_map[msg_code]); 3578 } 3579 } 3580 } 3581 3582 return skip; 3583} 3584 3585bool PreCallValidateCmdCopyImageToBuffer(layer_data *device_data, VkImageLayout srcImageLayout, GLOBAL_CB_NODE *cb_node, 3586 IMAGE_STATE *src_image_state, BUFFER_STATE *dst_buffer_state, uint32_t regionCount, 3587 const VkBufferImageCopy *pRegions, const char *func_name) { 3588 const debug_report_data *report_data = core_validation::GetReportData(device_data); 3589 bool skip = ValidateBufferImageCopyData(report_data, regionCount, pRegions, src_image_state, "vkCmdCopyImageToBuffer"); 3590 3591 // Validate command buffer state 3592 if (CB_RECORDING != cb_node->state) { 3593 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 3594 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_19202413, "DS", 3595 "Cannot call vkCmdCopyImageToBuffer() on command buffer which is not in recording state. %s.", 3596 validation_error_map[VALIDATION_ERROR_19202413]); 3597 } else { 3598 skip |= ValidateCmdSubpassState(device_data, cb_node, CMD_COPYIMAGETOBUFFER); 3599 } 3600 3601 // Command pool must support graphics, compute, or transfer operations 3602 auto pPool = GetCommandPoolNode(device_data, cb_node->createInfo.commandPool); 3603 3604 VkQueueFlags queue_flags = GetPhysDevProperties(device_data)->queue_family_properties[pPool->queueFamilyIndex].queueFlags; 3605 if (0 == (queue_flags & (VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT))) { 3606 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 3607 HandleToUint64(cb_node->createInfo.commandPool), __LINE__, VALIDATION_ERROR_19202415, "DS", 3608 "Cannot call vkCmdCopyImageToBuffer() on a command buffer allocated from a pool without graphics, compute, " 3609 "or transfer capabilities. 
%s.", 3610 validation_error_map[VALIDATION_ERROR_19202415]); 3611 } 3612 skip |= ValidateImageBounds(report_data, src_image_state, regionCount, pRegions, "vkCmdCopyBufferToImage()", 3613 VALIDATION_ERROR_1920016c); 3614 skip |= ValidateBufferBounds(report_data, src_image_state, dst_buffer_state, regionCount, pRegions, "vkCmdCopyImageToBuffer()", 3615 VALIDATION_ERROR_1920016e); 3616 3617 skip |= ValidateImageSampleCount(device_data, src_image_state, VK_SAMPLE_COUNT_1_BIT, "vkCmdCopyImageToBuffer(): srcImage", 3618 VALIDATION_ERROR_19200178); 3619 skip |= ValidateMemoryIsBoundToImage(device_data, src_image_state, "vkCmdCopyImageToBuffer()", VALIDATION_ERROR_19200176); 3620 skip |= ValidateMemoryIsBoundToBuffer(device_data, dst_buffer_state, "vkCmdCopyImageToBuffer()", VALIDATION_ERROR_19200180); 3621 3622 // Validate that SRC image & DST buffer have correct usage flags set 3623 skip |= ValidateImageUsageFlags(device_data, src_image_state, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, true, VALIDATION_ERROR_19200174, 3624 "vkCmdCopyImageToBuffer()", "VK_IMAGE_USAGE_TRANSFER_SRC_BIT"); 3625 skip |= ValidateBufferUsageFlags(device_data, dst_buffer_state, VK_BUFFER_USAGE_TRANSFER_DST_BIT, true, 3626 VALIDATION_ERROR_1920017e, "vkCmdCopyImageToBuffer()", "VK_BUFFER_USAGE_TRANSFER_DST_BIT"); 3627 skip |= insideRenderPass(device_data, cb_node, "vkCmdCopyImageToBuffer()", VALIDATION_ERROR_19200017); 3628 bool hit_error = false; 3629 for (uint32_t i = 0; i < regionCount; ++i) { 3630 skip |= VerifyImageLayout(device_data, cb_node, src_image_state, pRegions[i].imageSubresource, srcImageLayout, 3631 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, "vkCmdCopyImageToBuffer()", VALIDATION_ERROR_1920017c, 3632 &hit_error); 3633 skip |= ValidateCopyBufferImageTransferGranularityRequirements(device_data, cb_node, src_image_state, &pRegions[i], i, 3634 "vkCmdCopyImageToBuffer()"); 3635 } 3636 return skip; 3637} 3638 3639void PreCallRecordCmdCopyImageToBuffer(layer_data *device_data, GLOBAL_CB_NODE *cb_node, 
IMAGE_STATE *src_image_state, 3640 BUFFER_STATE *dst_buffer_state, uint32_t region_count, const VkBufferImageCopy *regions, 3641 VkImageLayout src_image_layout) { 3642 // Make sure that all image slices are updated to correct layout 3643 for (uint32_t i = 0; i < region_count; ++i) { 3644 SetImageLayout(device_data, cb_node, src_image_state, regions[i].imageSubresource, src_image_layout); 3645 } 3646 // Update bindings between buffer/image and cmd buffer 3647 AddCommandBufferBindingImage(device_data, cb_node, src_image_state); 3648 AddCommandBufferBindingBuffer(device_data, cb_node, dst_buffer_state); 3649 3650 std::function<bool()> function = [=]() { 3651 return ValidateImageMemoryIsValid(device_data, src_image_state, "vkCmdCopyImageToBuffer()"); 3652 }; 3653 cb_node->validate_functions.push_back(function); 3654 function = [=]() { 3655 SetBufferMemoryValid(device_data, dst_buffer_state, true); 3656 return false; 3657 }; 3658 cb_node->validate_functions.push_back(function); 3659 3660 core_validation::UpdateCmdBufferLastCmd(cb_node, CMD_COPYIMAGETOBUFFER); 3661} 3662 3663bool PreCallValidateCmdCopyBufferToImage(layer_data *device_data, VkImageLayout dstImageLayout, GLOBAL_CB_NODE *cb_node, 3664 BUFFER_STATE *src_buffer_state, IMAGE_STATE *dst_image_state, uint32_t regionCount, 3665 const VkBufferImageCopy *pRegions, const char *func_name) { 3666 const debug_report_data *report_data = core_validation::GetReportData(device_data); 3667 bool skip = ValidateBufferImageCopyData(report_data, regionCount, pRegions, dst_image_state, "vkCmdCopyBufferToImage"); 3668 3669 // Validate command buffer state 3670 if (CB_RECORDING != cb_node->state) { 3671 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 3672 HandleToUint64(cb_node->commandBuffer), __LINE__, VALIDATION_ERROR_18e02413, "DS", 3673 "Cannot call vkCmdCopyBufferToImage() on command buffer which is not in recording state. 
%s.", 3674 validation_error_map[VALIDATION_ERROR_18e02413]); 3675 } else { 3676 skip |= ValidateCmdSubpassState(device_data, cb_node, CMD_COPYBUFFERTOIMAGE); 3677 } 3678 3679 // Command pool must support graphics, compute, or transfer operations 3680 auto pPool = GetCommandPoolNode(device_data, cb_node->createInfo.commandPool); 3681 VkQueueFlags queue_flags = GetPhysDevProperties(device_data)->queue_family_properties[pPool->queueFamilyIndex].queueFlags; 3682 if (0 == (queue_flags & (VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT))) { 3683 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 3684 HandleToUint64(cb_node->createInfo.commandPool), __LINE__, VALIDATION_ERROR_18e02415, "DS", 3685 "Cannot call vkCmdCopyBufferToImage() on a command buffer allocated from a pool without graphics, compute, " 3686 "or transfer capabilities. %s.", 3687 validation_error_map[VALIDATION_ERROR_18e02415]); 3688 } 3689 skip |= ValidateImageBounds(report_data, dst_image_state, regionCount, pRegions, "vkCmdCopyBufferToImage()", 3690 VALIDATION_ERROR_18e00158); 3691 skip |= ValidateBufferBounds(report_data, dst_image_state, src_buffer_state, regionCount, pRegions, "vkCmdCopyBufferToImage()", 3692 VALIDATION_ERROR_18e00156); 3693 skip |= ValidateImageSampleCount(device_data, dst_image_state, VK_SAMPLE_COUNT_1_BIT, "vkCmdCopyBufferToImage(): dstImage", 3694 VALIDATION_ERROR_18e00166); 3695 skip |= ValidateMemoryIsBoundToBuffer(device_data, src_buffer_state, "vkCmdCopyBufferToImage()", VALIDATION_ERROR_18e00160); 3696 skip |= ValidateMemoryIsBoundToImage(device_data, dst_image_state, "vkCmdCopyBufferToImage()", VALIDATION_ERROR_18e00164); 3697 skip |= ValidateBufferUsageFlags(device_data, src_buffer_state, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, true, 3698 VALIDATION_ERROR_18e0015c, "vkCmdCopyBufferToImage()", "VK_BUFFER_USAGE_TRANSFER_SRC_BIT"); 3699 skip |= ValidateImageUsageFlags(device_data, dst_image_state, 
VK_IMAGE_USAGE_TRANSFER_DST_BIT, true, VALIDATION_ERROR_18e00162, 3700 "vkCmdCopyBufferToImage()", "VK_IMAGE_USAGE_TRANSFER_DST_BIT"); 3701 skip |= insideRenderPass(device_data, cb_node, "vkCmdCopyBufferToImage()", VALIDATION_ERROR_18e00017); 3702 bool hit_error = false; 3703 for (uint32_t i = 0; i < regionCount; ++i) { 3704 skip |= VerifyImageLayout(device_data, cb_node, dst_image_state, pRegions[i].imageSubresource, dstImageLayout, 3705 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, "vkCmdCopyBufferToImage()", VALIDATION_ERROR_18e0016a, 3706 &hit_error); 3707 skip |= ValidateCopyBufferImageTransferGranularityRequirements(device_data, cb_node, dst_image_state, &pRegions[i], i, 3708 "vkCmdCopyBufferToImage()"); 3709 } 3710 return skip; 3711} 3712 3713void PreCallRecordCmdCopyBufferToImage(layer_data *device_data, GLOBAL_CB_NODE *cb_node, BUFFER_STATE *src_buffer_state, 3714 IMAGE_STATE *dst_image_state, uint32_t region_count, const VkBufferImageCopy *regions, 3715 VkImageLayout dst_image_layout) { 3716 // Make sure that all image slices are updated to correct layout 3717 for (uint32_t i = 0; i < region_count; ++i) { 3718 SetImageLayout(device_data, cb_node, dst_image_state, regions[i].imageSubresource, dst_image_layout); 3719 } 3720 AddCommandBufferBindingBuffer(device_data, cb_node, src_buffer_state); 3721 AddCommandBufferBindingImage(device_data, cb_node, dst_image_state); 3722 std::function<bool()> function = [=]() { 3723 SetImageMemoryValid(device_data, dst_image_state, true); 3724 return false; 3725 }; 3726 cb_node->validate_functions.push_back(function); 3727 function = [=]() { return ValidateBufferMemoryIsValid(device_data, src_buffer_state, "vkCmdCopyBufferToImage()"); }; 3728 cb_node->validate_functions.push_back(function); 3729 3730 core_validation::UpdateCmdBufferLastCmd(cb_node, CMD_COPYBUFFERTOIMAGE); 3731} 3732 3733bool PreCallValidateGetImageSubresourceLayout(layer_data *device_data, VkImage image, const VkImageSubresource *pSubresource) { 3734 const auto 
report_data = core_validation::GetReportData(device_data); 3735 bool skip = false; 3736 const VkImageAspectFlags sub_aspect = pSubresource->aspectMask; 3737 3738 // VU 00733: The aspectMask member of pSubresource must only have a single bit set 3739 const int num_bits = sizeof(sub_aspect) * CHAR_BIT; 3740 std::bitset<num_bits> aspect_mask_bits(sub_aspect); 3741 if (aspect_mask_bits.count() != 1) { 3742 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image), 3743 __LINE__, VALIDATION_ERROR_2a6007ca, "IMAGE", 3744 "vkGetImageSubresourceLayout(): VkImageSubresource.aspectMask must have exactly 1 bit set. %s", 3745 validation_error_map[VALIDATION_ERROR_2a6007ca]); 3746 } 3747 3748 IMAGE_STATE *image_entry = GetImageState(device_data, image); 3749 if (!image_entry) { 3750 return skip; 3751 } 3752 3753 // VU 00732: image must have been created with tiling equal to VK_IMAGE_TILING_LINEAR 3754 if (image_entry->createInfo.tiling != VK_IMAGE_TILING_LINEAR) { 3755 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image), 3756 __LINE__, VALIDATION_ERROR_2a6007c8, "IMAGE", 3757 "vkGetImageSubresourceLayout(): Image must have tiling of VK_IMAGE_TILING_LINEAR. %s", 3758 validation_error_map[VALIDATION_ERROR_2a6007c8]); 3759 } 3760 3761 // VU 00739: mipLevel must be less than the mipLevels specified in VkImageCreateInfo when the image was created 3762 if (pSubresource->mipLevel >= image_entry->createInfo.mipLevels) { 3763 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image), 3764 __LINE__, VALIDATION_ERROR_0a4007cc, "IMAGE", 3765 "vkGetImageSubresourceLayout(): pSubresource.mipLevel (%d) must be less than %d. 
%s", 3766 pSubresource->mipLevel, image_entry->createInfo.mipLevels, validation_error_map[VALIDATION_ERROR_0a4007cc]); 3767 } 3768 3769 // VU 00740: arrayLayer must be less than the arrayLayers specified in VkImageCreateInfo when the image was created 3770 if (pSubresource->arrayLayer >= image_entry->createInfo.arrayLayers) { 3771 skip |= 3772 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image), 3773 __LINE__, VALIDATION_ERROR_0a4007ce, "IMAGE", 3774 "vkGetImageSubresourceLayout(): pSubresource.arrayLayer (%d) must be less than %d. %s", 3775 pSubresource->arrayLayer, image_entry->createInfo.arrayLayers, validation_error_map[VALIDATION_ERROR_0a4007ce]); 3776 } 3777 3778 // VU 00741: subresource's aspect must be compatible with image's format. 3779 const VkFormat img_format = image_entry->createInfo.format; 3780 if (FormatIsColor(img_format)) { 3781 if (sub_aspect != VK_IMAGE_ASPECT_COLOR_BIT) { 3782 skip |= log_msg( 3783 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image), __LINE__, 3784 VALIDATION_ERROR_0a400c01, "IMAGE", 3785 "vkGetImageSubresourceLayout(): For color formats, VkImageSubresource.aspectMask must be VK_IMAGE_ASPECT_COLOR. %s", 3786 validation_error_map[VALIDATION_ERROR_0a400c01]); 3787 } 3788 } else if (FormatIsDepthOrStencil(img_format)) { 3789 if ((sub_aspect != VK_IMAGE_ASPECT_DEPTH_BIT) && (sub_aspect != VK_IMAGE_ASPECT_STENCIL_BIT)) { 3790 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3791 HandleToUint64(image), __LINE__, VALIDATION_ERROR_0a400c01, "IMAGE", 3792 "vkGetImageSubresourceLayout(): For depth/stencil formats, VkImageSubresource.aspectMask must be " 3793 "either VK_IMAGE_ASPECT_DEPTH_BIT or VK_IMAGE_ASPECT_STENCIL_BIT. %s", 3794 validation_error_map[VALIDATION_ERROR_0a400c01]); 3795 } 3796 } 3797 return skip; 3798} 3799