parameter_validation.cpp revision e43d3cee564a734d8f2333346f618004357d6e0c
1/* Copyright (c) 2015-2016 The Khronos Group Inc. 2 * Copyright (c) 2015-2016 Valve Corporation 3 * Copyright (c) 2015-2016 LunarG, Inc. 4 * Copyright (C) 2015-2016 Google Inc. 5 * 6 * Licensed under the Apache License, Version 2.0 (the "License"); 7 * you may not use this file except in compliance with the License. 8 * You may obtain a copy of the License at 9 * 10 * http://www.apache.org/licenses/LICENSE-2.0 11 * 12 * Unless required by applicable law or agreed to in writing, software 13 * distributed under the License is distributed on an "AS IS" BASIS, 14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 * See the License for the specific language governing permissions and 16 * limitations under the License. 17 * 18 * Author: Jeremy Hayes <jeremy@lunarg.com> 19 * Author: Tony Barbour <tony@LunarG.com> 20 * Author: Mark Lobodzinski <mark@LunarG.com> 21 * Author: Dustin Graves <dustin@lunarg.com> 22 */ 23 24#define NOMINMAX 25 26#include <math.h> 27#include <stdio.h> 28#include <stdlib.h> 29#include <string.h> 30 31#include <iostream> 32#include <string> 33#include <sstream> 34#include <unordered_map> 35#include <unordered_set> 36#include <vector> 37 38#include "vk_loader_platform.h" 39#include "vulkan/vk_layer.h" 40#include "vk_layer_config.h" 41#include "vk_enum_validate_helper.h" 42#include "vk_struct_validate_helper.h" 43 44#include "vk_layer_table.h" 45#include "vk_layer_data.h" 46#include "vk_layer_logging.h" 47#include "vk_layer_extension_utils.h" 48#include "vk_layer_utils.h" 49 50#include "parameter_validation.h" 51 52namespace parameter_validation { 53 54struct layer_data { 55 VkInstance instance; 56 57 debug_report_data *report_data; 58 std::vector<VkDebugReportCallbackEXT> logging_callback; 59 60 // The following are for keeping track of the temporary callbacks that can 61 // be used in vkCreateInstance and vkDestroyInstance: 62 uint32_t num_tmp_callbacks; 63 VkDebugReportCallbackCreateInfoEXT *tmp_dbg_create_infos; 64 
VkDebugReportCallbackEXT *tmp_callbacks; 65 66 // TODO: Split instance/device structs 67 // Device Data 68 // Map for queue family index to queue count 69 std::unordered_map<uint32_t, uint32_t> queueFamilyIndexMap; 70 71 layer_data() : report_data(nullptr), num_tmp_callbacks(0), tmp_dbg_create_infos(nullptr), tmp_callbacks(nullptr){}; 72}; 73 74static std::unordered_map<void *, layer_data *> layer_data_map; 75static device_table_map pc_device_table_map; 76static instance_table_map pc_instance_table_map; 77 78// "my instance data" 79debug_report_data *mid(VkInstance object) { 80 dispatch_key key = get_dispatch_key(object); 81 layer_data *data = get_my_data_ptr(key, layer_data_map); 82#if DISPATCH_MAP_DEBUG 83 fprintf(stderr, "MID: map: 0x%p, object: 0x%p, key: 0x%p, data: 0x%p\n", &layer_data_map, object, key, data); 84#endif 85 assert(data != NULL); 86 87 return data->report_data; 88} 89 90// "my device data" 91debug_report_data *mdd(void *object) { 92 dispatch_key key = get_dispatch_key(object); 93 layer_data *data = get_my_data_ptr(key, layer_data_map); 94#if DISPATCH_MAP_DEBUG 95 fprintf(stderr, "MDD: map: 0x%p, object: 0x%p, key: 0x%p, data: 0x%p\n", &layer_data_map, object, key, data); 96#endif 97 assert(data != NULL); 98 return data->report_data; 99} 100 101static void init_parameter_validation(layer_data *my_data, const VkAllocationCallbacks *pAllocator) { 102 103 layer_debug_actions(my_data->report_data, my_data->logging_callback, pAllocator, "lunarg_parameter_validation"); 104} 105 106VKAPI_ATTR VkResult VKAPI_CALL 107CreateDebugReportCallbackEXT(VkInstance instance, const VkDebugReportCallbackCreateInfoEXT *pCreateInfo, 108 const VkAllocationCallbacks *pAllocator, VkDebugReportCallbackEXT *pMsgCallback) { 109 VkLayerInstanceDispatchTable *pTable = get_dispatch_table(pc_instance_table_map, instance); 110 VkResult result = pTable->CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pMsgCallback); 111 112 if (result == VK_SUCCESS) { 113 
layer_data *data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map); 114 result = layer_create_msg_callback(data->report_data, false, pCreateInfo, pAllocator, pMsgCallback); 115 } 116 117 return result; 118} 119 120VKAPI_ATTR void VKAPI_CALL DestroyDebugReportCallbackEXT(VkInstance instance, 121 VkDebugReportCallbackEXT msgCallback, 122 const VkAllocationCallbacks *pAllocator) { 123 VkLayerInstanceDispatchTable *pTable = get_dispatch_table(pc_instance_table_map, instance); 124 pTable->DestroyDebugReportCallbackEXT(instance, msgCallback, pAllocator); 125 126 layer_data *data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map); 127 layer_destroy_msg_callback(data->report_data, msgCallback, pAllocator); 128} 129 130VKAPI_ATTR void VKAPI_CALL 131DebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objType, uint64_t object, 132 size_t location, int32_t msgCode, const char *pLayerPrefix, const char *pMsg) { 133 VkLayerInstanceDispatchTable *pTable = get_dispatch_table(pc_instance_table_map, instance); 134 pTable->DebugReportMessageEXT(instance, flags, objType, object, location, msgCode, pLayerPrefix, pMsg); 135} 136 137static const VkExtensionProperties instance_extensions[] = {{VK_EXT_DEBUG_REPORT_EXTENSION_NAME, VK_EXT_DEBUG_REPORT_SPEC_VERSION}}; 138 139static const VkLayerProperties global_layer = { 140 "VK_LAYER_LUNARG_parameter_validation", VK_LAYER_API_VERSION, 1, "LunarG Validation Layer", 141}; 142 143static bool ValidateEnumerator(VkFormatFeatureFlagBits const &enumerator) { 144 VkFormatFeatureFlagBits allFlags = (VkFormatFeatureFlagBits)( 145 VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT | VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT | 146 VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT | VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT | 147 VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT | VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT | VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT | 148 VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT 
| VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT | 149 VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_BLIT_DST_BIT | 150 VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT); 151 if (enumerator & (~allFlags)) { 152 return false; 153 } 154 155 return true; 156} 157 158static std::string EnumeratorString(VkFormatFeatureFlagBits const &enumerator) { 159 if (!ValidateEnumerator(enumerator)) { 160 return "unrecognized enumerator"; 161 } 162 163 std::vector<std::string> strings; 164 if (enumerator & VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT) { 165 strings.push_back("VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT"); 166 } 167 if (enumerator & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT) { 168 strings.push_back("VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT"); 169 } 170 if (enumerator & VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT) { 171 strings.push_back("VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT"); 172 } 173 if (enumerator & VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT) { 174 strings.push_back("VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT"); 175 } 176 if (enumerator & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) { 177 strings.push_back("VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT"); 178 } 179 if (enumerator & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT) { 180 strings.push_back("VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT"); 181 } 182 if (enumerator & VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT) { 183 strings.push_back("VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT"); 184 } 185 if (enumerator & VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT) { 186 strings.push_back("VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT"); 187 } 188 if (enumerator & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) { 189 strings.push_back("VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT"); 190 } 191 if (enumerator & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) { 192 strings.push_back("VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT"); 193 } 194 if (enumerator & VK_FORMAT_FEATURE_BLIT_SRC_BIT) { 195 
strings.push_back("VK_FORMAT_FEATURE_BLIT_SRC_BIT"); 196 } 197 if (enumerator & VK_FORMAT_FEATURE_BLIT_DST_BIT) { 198 strings.push_back("VK_FORMAT_FEATURE_BLIT_DST_BIT"); 199 } 200 if (enumerator & VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT) { 201 strings.push_back("VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT"); 202 } 203 204 std::string enumeratorString; 205 for (auto const &string : strings) { 206 enumeratorString += string; 207 208 if (string != strings.back()) { 209 enumeratorString += '|'; 210 } 211 } 212 213 return enumeratorString; 214} 215 216static bool ValidateEnumerator(VkImageUsageFlagBits const &enumerator) { 217 VkImageUsageFlagBits allFlags = (VkImageUsageFlagBits)( 218 VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | 219 VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | 220 VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT); 221 if (enumerator & (~allFlags)) { 222 return false; 223 } 224 225 return true; 226} 227 228static std::string EnumeratorString(VkImageUsageFlagBits const &enumerator) { 229 if (!ValidateEnumerator(enumerator)) { 230 return "unrecognized enumerator"; 231 } 232 233 std::vector<std::string> strings; 234 if (enumerator & VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT) { 235 strings.push_back("VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT"); 236 } 237 if (enumerator & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) { 238 strings.push_back("VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT"); 239 } 240 if (enumerator & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) { 241 strings.push_back("VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT"); 242 } 243 if (enumerator & VK_IMAGE_USAGE_STORAGE_BIT) { 244 strings.push_back("VK_IMAGE_USAGE_STORAGE_BIT"); 245 } 246 if (enumerator & VK_IMAGE_USAGE_SAMPLED_BIT) { 247 strings.push_back("VK_IMAGE_USAGE_SAMPLED_BIT"); 248 } 249 if (enumerator & VK_IMAGE_USAGE_TRANSFER_DST_BIT) { 250 
strings.push_back("VK_IMAGE_USAGE_TRANSFER_DST_BIT"); 251 } 252 if (enumerator & VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT) { 253 strings.push_back("VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT"); 254 } 255 if (enumerator & VK_IMAGE_USAGE_TRANSFER_SRC_BIT) { 256 strings.push_back("VK_IMAGE_USAGE_TRANSFER_SRC_BIT"); 257 } 258 259 std::string enumeratorString; 260 for (auto const &string : strings) { 261 enumeratorString += string; 262 263 if (string != strings.back()) { 264 enumeratorString += '|'; 265 } 266 } 267 268 return enumeratorString; 269} 270 271static bool ValidateEnumerator(VkQueueFlagBits const &enumerator) { 272 VkQueueFlagBits allFlags = 273 (VkQueueFlagBits)(VK_QUEUE_TRANSFER_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_SPARSE_BINDING_BIT | VK_QUEUE_GRAPHICS_BIT); 274 if (enumerator & (~allFlags)) { 275 return false; 276 } 277 278 return true; 279} 280 281static std::string EnumeratorString(VkQueueFlagBits const &enumerator) { 282 if (!ValidateEnumerator(enumerator)) { 283 return "unrecognized enumerator"; 284 } 285 286 std::vector<std::string> strings; 287 if (enumerator & VK_QUEUE_TRANSFER_BIT) { 288 strings.push_back("VK_QUEUE_TRANSFER_BIT"); 289 } 290 if (enumerator & VK_QUEUE_COMPUTE_BIT) { 291 strings.push_back("VK_QUEUE_COMPUTE_BIT"); 292 } 293 if (enumerator & VK_QUEUE_SPARSE_BINDING_BIT) { 294 strings.push_back("VK_QUEUE_SPARSE_BINDING_BIT"); 295 } 296 if (enumerator & VK_QUEUE_GRAPHICS_BIT) { 297 strings.push_back("VK_QUEUE_GRAPHICS_BIT"); 298 } 299 300 std::string enumeratorString; 301 for (auto const &string : strings) { 302 enumeratorString += string; 303 304 if (string != strings.back()) { 305 enumeratorString += '|'; 306 } 307 } 308 309 return enumeratorString; 310} 311 312static bool ValidateEnumerator(VkMemoryPropertyFlagBits const &enumerator) { 313 VkMemoryPropertyFlagBits allFlags = (VkMemoryPropertyFlagBits)( 314 VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | 315 
VK_MEMORY_PROPERTY_HOST_CACHED_BIT | VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT); 316 if (enumerator & (~allFlags)) { 317 return false; 318 } 319 320 return true; 321} 322 323static std::string EnumeratorString(VkMemoryPropertyFlagBits const &enumerator) { 324 if (!ValidateEnumerator(enumerator)) { 325 return "unrecognized enumerator"; 326 } 327 328 std::vector<std::string> strings; 329 if (enumerator & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) { 330 strings.push_back("VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT"); 331 } 332 if (enumerator & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) { 333 strings.push_back("VK_MEMORY_PROPERTY_HOST_COHERENT_BIT"); 334 } 335 if (enumerator & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) { 336 strings.push_back("VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT"); 337 } 338 if (enumerator & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) { 339 strings.push_back("VK_MEMORY_PROPERTY_HOST_CACHED_BIT"); 340 } 341 if (enumerator & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) { 342 strings.push_back("VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT"); 343 } 344 345 std::string enumeratorString; 346 for (auto const &string : strings) { 347 enumeratorString += string; 348 349 if (string != strings.back()) { 350 enumeratorString += '|'; 351 } 352 } 353 354 return enumeratorString; 355} 356 357static bool ValidateEnumerator(VkMemoryHeapFlagBits const &enumerator) { 358 VkMemoryHeapFlagBits allFlags = (VkMemoryHeapFlagBits)(VK_MEMORY_HEAP_DEVICE_LOCAL_BIT); 359 if (enumerator & (~allFlags)) { 360 return false; 361 } 362 363 return true; 364} 365 366static std::string EnumeratorString(VkMemoryHeapFlagBits const &enumerator) { 367 if (!ValidateEnumerator(enumerator)) { 368 return "unrecognized enumerator"; 369 } 370 371 std::vector<std::string> strings; 372 if (enumerator & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) { 373 strings.push_back("VK_MEMORY_HEAP_DEVICE_LOCAL_BIT"); 374 } 375 376 std::string enumeratorString; 377 for (auto const &string : strings) { 378 enumeratorString += string; 379 380 if (string != strings.back()) 
{ 381 enumeratorString += '|'; 382 } 383 } 384 385 return enumeratorString; 386} 387 388static bool ValidateEnumerator(VkSparseImageFormatFlagBits const &enumerator) { 389 VkSparseImageFormatFlagBits allFlags = 390 (VkSparseImageFormatFlagBits)(VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT | 391 VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT | VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT); 392 if (enumerator & (~allFlags)) { 393 return false; 394 } 395 396 return true; 397} 398 399static std::string EnumeratorString(VkSparseImageFormatFlagBits const &enumerator) { 400 if (!ValidateEnumerator(enumerator)) { 401 return "unrecognized enumerator"; 402 } 403 404 std::vector<std::string> strings; 405 if (enumerator & VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT) { 406 strings.push_back("VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT"); 407 } 408 if (enumerator & VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT) { 409 strings.push_back("VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT"); 410 } 411 if (enumerator & VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT) { 412 strings.push_back("VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT"); 413 } 414 415 std::string enumeratorString; 416 for (auto const &string : strings) { 417 enumeratorString += string; 418 419 if (string != strings.back()) { 420 enumeratorString += '|'; 421 } 422 } 423 424 return enumeratorString; 425} 426 427static bool ValidateEnumerator(VkFenceCreateFlagBits const &enumerator) { 428 VkFenceCreateFlagBits allFlags = (VkFenceCreateFlagBits)(VK_FENCE_CREATE_SIGNALED_BIT); 429 if (enumerator & (~allFlags)) { 430 return false; 431 } 432 433 return true; 434} 435 436static std::string EnumeratorString(VkFenceCreateFlagBits const &enumerator) { 437 if (!ValidateEnumerator(enumerator)) { 438 return "unrecognized enumerator"; 439 } 440 441 std::vector<std::string> strings; 442 if (enumerator & VK_FENCE_CREATE_SIGNALED_BIT) { 443 strings.push_back("VK_FENCE_CREATE_SIGNALED_BIT"); 444 } 445 446 std::string enumeratorString; 447 
for (auto const &string : strings) { 448 enumeratorString += string; 449 450 if (string != strings.back()) { 451 enumeratorString += '|'; 452 } 453 } 454 455 return enumeratorString; 456} 457 458static bool ValidateEnumerator(VkQueryPipelineStatisticFlagBits const &enumerator) { 459 VkQueryPipelineStatisticFlagBits allFlags = (VkQueryPipelineStatisticFlagBits)( 460 VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT | VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT | 461 VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT | VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT | 462 VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT | VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT | 463 VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT | VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT | 464 VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT | 465 VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT | 466 VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT); 467 if (enumerator & (~allFlags)) { 468 return false; 469 } 470 471 return true; 472} 473 474static std::string EnumeratorString(VkQueryPipelineStatisticFlagBits const &enumerator) { 475 if (!ValidateEnumerator(enumerator)) { 476 return "unrecognized enumerator"; 477 } 478 479 std::vector<std::string> strings; 480 if (enumerator & VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT) { 481 strings.push_back("VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT"); 482 } 483 if (enumerator & VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT) { 484 strings.push_back("VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT"); 485 } 486 if (enumerator & VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT) { 487 strings.push_back("VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT"); 488 } 489 if (enumerator & VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT) { 490 
strings.push_back("VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT"); 491 } 492 if (enumerator & VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT) { 493 strings.push_back("VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT"); 494 } 495 if (enumerator & VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT) { 496 strings.push_back("VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT"); 497 } 498 if (enumerator & VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT) { 499 strings.push_back("VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT"); 500 } 501 if (enumerator & VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT) { 502 strings.push_back("VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT"); 503 } 504 if (enumerator & VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT) { 505 strings.push_back("VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT"); 506 } 507 if (enumerator & VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT) { 508 strings.push_back("VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT"); 509 } 510 if (enumerator & VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT) { 511 strings.push_back("VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT"); 512 } 513 514 std::string enumeratorString; 515 for (auto const &string : strings) { 516 enumeratorString += string; 517 518 if (string != strings.back()) { 519 enumeratorString += '|'; 520 } 521 } 522 523 return enumeratorString; 524} 525 526static bool ValidateEnumerator(VkQueryResultFlagBits const &enumerator) { 527 VkQueryResultFlagBits allFlags = (VkQueryResultFlagBits)(VK_QUERY_RESULT_PARTIAL_BIT | VK_QUERY_RESULT_WITH_AVAILABILITY_BIT | 528 VK_QUERY_RESULT_WAIT_BIT | VK_QUERY_RESULT_64_BIT); 529 if (enumerator & (~allFlags)) { 530 return false; 531 } 532 533 return true; 534} 535 536static std::string EnumeratorString(VkQueryResultFlagBits const 
&enumerator) { 537 if (!ValidateEnumerator(enumerator)) { 538 return "unrecognized enumerator"; 539 } 540 541 std::vector<std::string> strings; 542 if (enumerator & VK_QUERY_RESULT_PARTIAL_BIT) { 543 strings.push_back("VK_QUERY_RESULT_PARTIAL_BIT"); 544 } 545 if (enumerator & VK_QUERY_RESULT_WITH_AVAILABILITY_BIT) { 546 strings.push_back("VK_QUERY_RESULT_WITH_AVAILABILITY_BIT"); 547 } 548 if (enumerator & VK_QUERY_RESULT_WAIT_BIT) { 549 strings.push_back("VK_QUERY_RESULT_WAIT_BIT"); 550 } 551 if (enumerator & VK_QUERY_RESULT_64_BIT) { 552 strings.push_back("VK_QUERY_RESULT_64_BIT"); 553 } 554 555 std::string enumeratorString; 556 for (auto const &string : strings) { 557 enumeratorString += string; 558 559 if (string != strings.back()) { 560 enumeratorString += '|'; 561 } 562 } 563 564 return enumeratorString; 565} 566 567static bool ValidateEnumerator(VkBufferUsageFlagBits const &enumerator) { 568 VkBufferUsageFlagBits allFlags = (VkBufferUsageFlagBits)( 569 VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT | 570 VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | 571 VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT); 572 if (enumerator & (~allFlags)) { 573 return false; 574 } 575 576 return true; 577} 578 579static std::string EnumeratorString(VkBufferUsageFlagBits const &enumerator) { 580 if (!ValidateEnumerator(enumerator)) { 581 return "unrecognized enumerator"; 582 } 583 584 std::vector<std::string> strings; 585 if (enumerator & VK_BUFFER_USAGE_VERTEX_BUFFER_BIT) { 586 strings.push_back("VK_BUFFER_USAGE_VERTEX_BUFFER_BIT"); 587 } 588 if (enumerator & VK_BUFFER_USAGE_INDEX_BUFFER_BIT) { 589 strings.push_back("VK_BUFFER_USAGE_INDEX_BUFFER_BIT"); 590 } 591 if (enumerator & VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT) { 592 strings.push_back("VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT"); 593 } 594 
if (enumerator & VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT) { 595 strings.push_back("VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT"); 596 } 597 if (enumerator & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) { 598 strings.push_back("VK_BUFFER_USAGE_STORAGE_BUFFER_BIT"); 599 } 600 if (enumerator & VK_BUFFER_USAGE_TRANSFER_DST_BIT) { 601 strings.push_back("VK_BUFFER_USAGE_TRANSFER_DST_BIT"); 602 } 603 if (enumerator & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) { 604 strings.push_back("VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT"); 605 } 606 if (enumerator & VK_BUFFER_USAGE_TRANSFER_SRC_BIT) { 607 strings.push_back("VK_BUFFER_USAGE_TRANSFER_SRC_BIT"); 608 } 609 if (enumerator & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) { 610 strings.push_back("VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT"); 611 } 612 613 std::string enumeratorString; 614 for (auto const &string : strings) { 615 enumeratorString += string; 616 617 if (string != strings.back()) { 618 enumeratorString += '|'; 619 } 620 } 621 622 return enumeratorString; 623} 624 625static bool ValidateEnumerator(VkBufferCreateFlagBits const &enumerator) { 626 VkBufferCreateFlagBits allFlags = (VkBufferCreateFlagBits)( 627 VK_BUFFER_CREATE_SPARSE_ALIASED_BIT | VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT | VK_BUFFER_CREATE_SPARSE_BINDING_BIT); 628 if (enumerator & (~allFlags)) { 629 return false; 630 } 631 632 return true; 633} 634 635static std::string EnumeratorString(VkBufferCreateFlagBits const &enumerator) { 636 if (!ValidateEnumerator(enumerator)) { 637 return "unrecognized enumerator"; 638 } 639 640 std::vector<std::string> strings; 641 if (enumerator & VK_BUFFER_CREATE_SPARSE_ALIASED_BIT) { 642 strings.push_back("VK_BUFFER_CREATE_SPARSE_ALIASED_BIT"); 643 } 644 if (enumerator & VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT) { 645 strings.push_back("VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT"); 646 } 647 if (enumerator & VK_BUFFER_CREATE_SPARSE_BINDING_BIT) { 648 strings.push_back("VK_BUFFER_CREATE_SPARSE_BINDING_BIT"); 649 } 650 651 std::string enumeratorString; 652 for 
(auto const &string : strings) { 653 enumeratorString += string; 654 655 if (string != strings.back()) { 656 enumeratorString += '|'; 657 } 658 } 659 660 return enumeratorString; 661} 662 663static bool ValidateEnumerator(VkImageCreateFlagBits const &enumerator) { 664 VkImageCreateFlagBits allFlags = (VkImageCreateFlagBits)( 665 VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT | VK_IMAGE_CREATE_SPARSE_ALIASED_BIT | VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT | 666 VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_SPARSE_BINDING_BIT); 667 if (enumerator & (~allFlags)) { 668 return false; 669 } 670 671 return true; 672} 673 674static std::string EnumeratorString(VkImageCreateFlagBits const &enumerator) { 675 if (!ValidateEnumerator(enumerator)) { 676 return "unrecognized enumerator"; 677 } 678 679 std::vector<std::string> strings; 680 if (enumerator & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT) { 681 strings.push_back("VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT"); 682 } 683 if (enumerator & VK_IMAGE_CREATE_SPARSE_ALIASED_BIT) { 684 strings.push_back("VK_IMAGE_CREATE_SPARSE_ALIASED_BIT"); 685 } 686 if (enumerator & VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT) { 687 strings.push_back("VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT"); 688 } 689 if (enumerator & VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT) { 690 strings.push_back("VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT"); 691 } 692 if (enumerator & VK_IMAGE_CREATE_SPARSE_BINDING_BIT) { 693 strings.push_back("VK_IMAGE_CREATE_SPARSE_BINDING_BIT"); 694 } 695 696 std::string enumeratorString; 697 for (auto const &string : strings) { 698 enumeratorString += string; 699 700 if (string != strings.back()) { 701 enumeratorString += '|'; 702 } 703 } 704 705 return enumeratorString; 706} 707 708static bool ValidateEnumerator(VkColorComponentFlagBits const &enumerator) { 709 VkColorComponentFlagBits allFlags = (VkColorComponentFlagBits)(VK_COLOR_COMPONENT_A_BIT | VK_COLOR_COMPONENT_B_BIT | 710 VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_R_BIT); 711 if (enumerator & (~allFlags)) { 712 return 
false; 713 } 714 715 return true; 716} 717 718static std::string EnumeratorString(VkColorComponentFlagBits const &enumerator) { 719 if (!ValidateEnumerator(enumerator)) { 720 return "unrecognized enumerator"; 721 } 722 723 std::vector<std::string> strings; 724 if (enumerator & VK_COLOR_COMPONENT_A_BIT) { 725 strings.push_back("VK_COLOR_COMPONENT_A_BIT"); 726 } 727 if (enumerator & VK_COLOR_COMPONENT_B_BIT) { 728 strings.push_back("VK_COLOR_COMPONENT_B_BIT"); 729 } 730 if (enumerator & VK_COLOR_COMPONENT_G_BIT) { 731 strings.push_back("VK_COLOR_COMPONENT_G_BIT"); 732 } 733 if (enumerator & VK_COLOR_COMPONENT_R_BIT) { 734 strings.push_back("VK_COLOR_COMPONENT_R_BIT"); 735 } 736 737 std::string enumeratorString; 738 for (auto const &string : strings) { 739 enumeratorString += string; 740 741 if (string != strings.back()) { 742 enumeratorString += '|'; 743 } 744 } 745 746 return enumeratorString; 747} 748 749static bool ValidateEnumerator(VkPipelineCreateFlagBits const &enumerator) { 750 VkPipelineCreateFlagBits allFlags = (VkPipelineCreateFlagBits)( 751 VK_PIPELINE_CREATE_DERIVATIVE_BIT | VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT | VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT); 752 if (enumerator & (~allFlags)) { 753 return false; 754 } 755 756 return true; 757} 758 759static std::string EnumeratorString(VkPipelineCreateFlagBits const &enumerator) { 760 if (!ValidateEnumerator(enumerator)) { 761 return "unrecognized enumerator"; 762 } 763 764 std::vector<std::string> strings; 765 if (enumerator & VK_PIPELINE_CREATE_DERIVATIVE_BIT) { 766 strings.push_back("VK_PIPELINE_CREATE_DERIVATIVE_BIT"); 767 } 768 if (enumerator & VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT) { 769 strings.push_back("VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT"); 770 } 771 if (enumerator & VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT) { 772 strings.push_back("VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT"); 773 } 774 775 std::string enumeratorString; 776 for (auto const &string : strings) { 777 
enumeratorString += string; 778 779 if (string != strings.back()) { 780 enumeratorString += '|'; 781 } 782 } 783 784 return enumeratorString; 785} 786 787static bool ValidateEnumerator(VkShaderStageFlagBits const &enumerator) { 788 VkShaderStageFlagBits allFlags = (VkShaderStageFlagBits)( 789 VK_SHADER_STAGE_ALL | VK_SHADER_STAGE_FRAGMENT_BIT | VK_SHADER_STAGE_GEOMETRY_BIT | VK_SHADER_STAGE_COMPUTE_BIT | 790 VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT | VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT | VK_SHADER_STAGE_VERTEX_BIT); 791 if (enumerator & (~allFlags)) { 792 return false; 793 } 794 795 return true; 796} 797 798static std::string EnumeratorString(VkShaderStageFlagBits const &enumerator) { 799 if (!ValidateEnumerator(enumerator)) { 800 return "unrecognized enumerator"; 801 } 802 803 std::vector<std::string> strings; 804 if (enumerator & VK_SHADER_STAGE_ALL) { 805 strings.push_back("VK_SHADER_STAGE_ALL"); 806 } 807 if (enumerator & VK_SHADER_STAGE_FRAGMENT_BIT) { 808 strings.push_back("VK_SHADER_STAGE_FRAGMENT_BIT"); 809 } 810 if (enumerator & VK_SHADER_STAGE_GEOMETRY_BIT) { 811 strings.push_back("VK_SHADER_STAGE_GEOMETRY_BIT"); 812 } 813 if (enumerator & VK_SHADER_STAGE_COMPUTE_BIT) { 814 strings.push_back("VK_SHADER_STAGE_COMPUTE_BIT"); 815 } 816 if (enumerator & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) { 817 strings.push_back("VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT"); 818 } 819 if (enumerator & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT) { 820 strings.push_back("VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT"); 821 } 822 if (enumerator & VK_SHADER_STAGE_VERTEX_BIT) { 823 strings.push_back("VK_SHADER_STAGE_VERTEX_BIT"); 824 } 825 826 std::string enumeratorString; 827 for (auto const &string : strings) { 828 enumeratorString += string; 829 830 if (string != strings.back()) { 831 enumeratorString += '|'; 832 } 833 } 834 835 return enumeratorString; 836} 837 838static bool ValidateEnumerator(VkPipelineStageFlagBits const &enumerator) { 839 VkPipelineStageFlagBits 
allFlags = (VkPipelineStageFlagBits)( 840 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT | VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT | VK_PIPELINE_STAGE_HOST_BIT | 841 VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT | VK_PIPELINE_STAGE_TRANSFER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | 842 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | 843 VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | 844 VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT | VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT | 845 VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT | 846 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT); 847 if (enumerator & (~allFlags)) { 848 return false; 849 } 850 851 return true; 852} 853 854static std::string EnumeratorString(VkPipelineStageFlagBits const &enumerator) { 855 if (!ValidateEnumerator(enumerator)) { 856 return "unrecognized enumerator"; 857 } 858 859 std::vector<std::string> strings; 860 if (enumerator & VK_PIPELINE_STAGE_ALL_COMMANDS_BIT) { 861 strings.push_back("VK_PIPELINE_STAGE_ALL_COMMANDS_BIT"); 862 } 863 if (enumerator & VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT) { 864 strings.push_back("VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT"); 865 } 866 if (enumerator & VK_PIPELINE_STAGE_HOST_BIT) { 867 strings.push_back("VK_PIPELINE_STAGE_HOST_BIT"); 868 } 869 if (enumerator & VK_PIPELINE_STAGE_TRANSFER_BIT) { 870 strings.push_back("VK_PIPELINE_STAGE_TRANSFER_BIT"); 871 } 872 if (enumerator & VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT) { 873 strings.push_back("VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT"); 874 } 875 if (enumerator & VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT) { 876 strings.push_back("VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT"); 877 } 878 if (enumerator & VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT) { 879 strings.push_back("VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT"); 880 } 881 if (enumerator & VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT) { 
882 strings.push_back("VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT"); 883 } 884 if (enumerator & VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT) { 885 strings.push_back("VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT"); 886 } 887 if (enumerator & VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT) { 888 strings.push_back("VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT"); 889 } 890 if (enumerator & VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT) { 891 strings.push_back("VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT"); 892 } 893 if (enumerator & VK_PIPELINE_STAGE_VERTEX_SHADER_BIT) { 894 strings.push_back("VK_PIPELINE_STAGE_VERTEX_SHADER_BIT"); 895 } 896 if (enumerator & VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT) { 897 strings.push_back("VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT"); 898 } 899 if (enumerator & VK_PIPELINE_STAGE_VERTEX_INPUT_BIT) { 900 strings.push_back("VK_PIPELINE_STAGE_VERTEX_INPUT_BIT"); 901 } 902 if (enumerator & VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT) { 903 strings.push_back("VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT"); 904 } 905 if (enumerator & VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT) { 906 strings.push_back("VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT"); 907 } 908 if (enumerator & VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT) { 909 strings.push_back("VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT"); 910 } 911 912 std::string enumeratorString; 913 for (auto const &string : strings) { 914 enumeratorString += string; 915 916 if (string != strings.back()) { 917 enumeratorString += '|'; 918 } 919 } 920 921 return enumeratorString; 922} 923 924static bool ValidateEnumerator(VkAccessFlagBits const &enumerator) { 925 VkAccessFlagBits allFlags = (VkAccessFlagBits)( 926 VK_ACCESS_INDIRECT_COMMAND_READ_BIT | VK_ACCESS_INDEX_READ_BIT | VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT | 927 VK_ACCESS_UNIFORM_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT | 928 VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | 
VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | 929 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | VK_ACCESS_TRANSFER_READ_BIT | VK_ACCESS_TRANSFER_WRITE_BIT | 930 VK_ACCESS_HOST_READ_BIT | VK_ACCESS_HOST_WRITE_BIT | VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT); 931 932 if (enumerator & (~allFlags)) { 933 return false; 934 } 935 936 return true; 937} 938 939static std::string EnumeratorString(VkAccessFlagBits const &enumerator) { 940 if (!ValidateEnumerator(enumerator)) { 941 return "unrecognized enumerator"; 942 } 943 944 std::vector<std::string> strings; 945 if (enumerator & VK_ACCESS_INDIRECT_COMMAND_READ_BIT) { 946 strings.push_back("VK_ACCESS_INDIRECT_COMMAND_READ_BIT"); 947 } 948 if (enumerator & VK_ACCESS_INDEX_READ_BIT) { 949 strings.push_back("VK_ACCESS_INDEX_READ_BIT"); 950 } 951 if (enumerator & VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT) { 952 strings.push_back("VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT"); 953 } 954 if (enumerator & VK_ACCESS_UNIFORM_READ_BIT) { 955 strings.push_back("VK_ACCESS_UNIFORM_READ_BIT"); 956 } 957 if (enumerator & VK_ACCESS_INPUT_ATTACHMENT_READ_BIT) { 958 strings.push_back("VK_ACCESS_INPUT_ATTACHMENT_READ_BIT"); 959 } 960 if (enumerator & VK_ACCESS_SHADER_READ_BIT) { 961 strings.push_back("VK_ACCESS_SHADER_READ_BIT"); 962 } 963 if (enumerator & VK_ACCESS_SHADER_WRITE_BIT) { 964 strings.push_back("VK_ACCESS_SHADER_WRITE_BIT"); 965 } 966 if (enumerator & VK_ACCESS_COLOR_ATTACHMENT_READ_BIT) { 967 strings.push_back("VK_ACCESS_COLOR_ATTACHMENT_READ_BIT"); 968 } 969 if (enumerator & VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT) { 970 strings.push_back("VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT"); 971 } 972 if (enumerator & VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT) { 973 strings.push_back("VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT"); 974 } 975 if (enumerator & VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT) { 976 strings.push_back("VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT"); 977 } 978 if (enumerator & VK_ACCESS_TRANSFER_READ_BIT) { 979 
strings.push_back("VK_ACCESS_TRANSFER_READ_BIT"); 980 } 981 if (enumerator & VK_ACCESS_TRANSFER_WRITE_BIT) { 982 strings.push_back("VK_ACCESS_TRANSFER_WRITE_BIT"); 983 } 984 if (enumerator & VK_ACCESS_HOST_READ_BIT) { 985 strings.push_back("VK_ACCESS_HOST_READ_BIT"); 986 } 987 if (enumerator & VK_ACCESS_HOST_WRITE_BIT) { 988 strings.push_back("VK_ACCESS_HOST_WRITE_BIT"); 989 } 990 if (enumerator & VK_ACCESS_MEMORY_READ_BIT) { 991 strings.push_back("VK_ACCESS_MEMORY_READ_BIT"); 992 } 993 if (enumerator & VK_ACCESS_MEMORY_WRITE_BIT) { 994 strings.push_back("VK_ACCESS_MEMORY_WRITE_BIT"); 995 } 996 997 std::string enumeratorString; 998 for (auto const &string : strings) { 999 enumeratorString += string; 1000 1001 if (string != strings.back()) { 1002 enumeratorString += '|'; 1003 } 1004 } 1005 1006 return enumeratorString; 1007} 1008 1009static bool ValidateEnumerator(VkCommandPoolCreateFlagBits const &enumerator) { 1010 VkCommandPoolCreateFlagBits allFlags = 1011 (VkCommandPoolCreateFlagBits)(VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT | VK_COMMAND_POOL_CREATE_TRANSIENT_BIT); 1012 if (enumerator & (~allFlags)) { 1013 return false; 1014 } 1015 1016 return true; 1017} 1018 1019static std::string EnumeratorString(VkCommandPoolCreateFlagBits const &enumerator) { 1020 if (!ValidateEnumerator(enumerator)) { 1021 return "unrecognized enumerator"; 1022 } 1023 1024 std::vector<std::string> strings; 1025 if (enumerator & VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT) { 1026 strings.push_back("VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT"); 1027 } 1028 if (enumerator & VK_COMMAND_POOL_CREATE_TRANSIENT_BIT) { 1029 strings.push_back("VK_COMMAND_POOL_CREATE_TRANSIENT_BIT"); 1030 } 1031 1032 std::string enumeratorString; 1033 for (auto const &string : strings) { 1034 enumeratorString += string; 1035 1036 if (string != strings.back()) { 1037 enumeratorString += '|'; 1038 } 1039 } 1040 1041 return enumeratorString; 1042} 1043 1044static bool 
ValidateEnumerator(VkCommandPoolResetFlagBits const &enumerator) { 1045 VkCommandPoolResetFlagBits allFlags = (VkCommandPoolResetFlagBits)(VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT); 1046 if (enumerator & (~allFlags)) { 1047 return false; 1048 } 1049 1050 return true; 1051} 1052 1053static std::string EnumeratorString(VkCommandPoolResetFlagBits const &enumerator) { 1054 if (!ValidateEnumerator(enumerator)) { 1055 return "unrecognized enumerator"; 1056 } 1057 1058 std::vector<std::string> strings; 1059 if (enumerator & VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT) { 1060 strings.push_back("VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT"); 1061 } 1062 1063 std::string enumeratorString; 1064 for (auto const &string : strings) { 1065 enumeratorString += string; 1066 1067 if (string != strings.back()) { 1068 enumeratorString += '|'; 1069 } 1070 } 1071 1072 return enumeratorString; 1073} 1074 1075static bool ValidateEnumerator(VkCommandBufferUsageFlags const &enumerator) { 1076 VkCommandBufferUsageFlags allFlags = 1077 (VkCommandBufferUsageFlags)(VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT | VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT | 1078 VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT); 1079 if (enumerator & (~allFlags)) { 1080 return false; 1081 } 1082 1083 return true; 1084} 1085 1086static std::string EnumeratorString(VkCommandBufferUsageFlags const &enumerator) { 1087 if (!ValidateEnumerator(enumerator)) { 1088 return "unrecognized enumerator"; 1089 } 1090 1091 std::vector<std::string> strings; 1092 if (enumerator & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) { 1093 strings.push_back("VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT"); 1094 } 1095 if (enumerator & VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT) { 1096 strings.push_back("VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT"); 1097 } 1098 if (enumerator & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT) { 1099 strings.push_back("VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT"); 1100 } 1101 1102 std::string 
enumeratorString; 1103 for (auto const &string : strings) { 1104 enumeratorString += string; 1105 1106 if (string != strings.back()) { 1107 enumeratorString += '|'; 1108 } 1109 } 1110 1111 return enumeratorString; 1112} 1113 1114static bool ValidateEnumerator(VkCommandBufferResetFlagBits const &enumerator) { 1115 VkCommandBufferResetFlagBits allFlags = (VkCommandBufferResetFlagBits)(VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT); 1116 if (enumerator & (~allFlags)) { 1117 return false; 1118 } 1119 1120 return true; 1121} 1122 1123static std::string EnumeratorString(VkCommandBufferResetFlagBits const &enumerator) { 1124 if (!ValidateEnumerator(enumerator)) { 1125 return "unrecognized enumerator"; 1126 } 1127 1128 std::vector<std::string> strings; 1129 if (enumerator & VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT) { 1130 strings.push_back("VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT"); 1131 } 1132 1133 std::string enumeratorString; 1134 for (auto const &string : strings) { 1135 enumeratorString += string; 1136 1137 if (string != strings.back()) { 1138 enumeratorString += '|'; 1139 } 1140 } 1141 1142 return enumeratorString; 1143} 1144 1145static bool ValidateEnumerator(VkImageAspectFlagBits const &enumerator) { 1146 VkImageAspectFlagBits allFlags = (VkImageAspectFlagBits)(VK_IMAGE_ASPECT_METADATA_BIT | VK_IMAGE_ASPECT_STENCIL_BIT | 1147 VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_COLOR_BIT); 1148 if (enumerator & (~allFlags)) { 1149 return false; 1150 } 1151 1152 return true; 1153} 1154 1155static std::string EnumeratorString(VkImageAspectFlagBits const &enumerator) { 1156 if (!ValidateEnumerator(enumerator)) { 1157 return "unrecognized enumerator"; 1158 } 1159 1160 std::vector<std::string> strings; 1161 if (enumerator & VK_IMAGE_ASPECT_METADATA_BIT) { 1162 strings.push_back("VK_IMAGE_ASPECT_METADATA_BIT"); 1163 } 1164 if (enumerator & VK_IMAGE_ASPECT_STENCIL_BIT) { 1165 strings.push_back("VK_IMAGE_ASPECT_STENCIL_BIT"); 1166 } 1167 if (enumerator & 
VK_IMAGE_ASPECT_DEPTH_BIT) { 1168 strings.push_back("VK_IMAGE_ASPECT_DEPTH_BIT"); 1169 } 1170 if (enumerator & VK_IMAGE_ASPECT_COLOR_BIT) { 1171 strings.push_back("VK_IMAGE_ASPECT_COLOR_BIT"); 1172 } 1173 1174 std::string enumeratorString; 1175 for (auto const &string : strings) { 1176 enumeratorString += string; 1177 1178 if (string != strings.back()) { 1179 enumeratorString += '|'; 1180 } 1181 } 1182 1183 return enumeratorString; 1184} 1185 1186static bool ValidateEnumerator(VkQueryControlFlagBits const &enumerator) { 1187 VkQueryControlFlagBits allFlags = (VkQueryControlFlagBits)(VK_QUERY_CONTROL_PRECISE_BIT); 1188 if (enumerator & (~allFlags)) { 1189 return false; 1190 } 1191 1192 return true; 1193} 1194 1195static std::string EnumeratorString(VkQueryControlFlagBits const &enumerator) { 1196 if (!ValidateEnumerator(enumerator)) { 1197 return "unrecognized enumerator"; 1198 } 1199 1200 std::vector<std::string> strings; 1201 if (enumerator & VK_QUERY_CONTROL_PRECISE_BIT) { 1202 strings.push_back("VK_QUERY_CONTROL_PRECISE_BIT"); 1203 } 1204 1205 std::string enumeratorString; 1206 for (auto const &string : strings) { 1207 enumeratorString += string; 1208 1209 if (string != strings.back()) { 1210 enumeratorString += '|'; 1211 } 1212 } 1213 1214 return enumeratorString; 1215} 1216 1217static const int MaxParamCheckerStringLength = 256; 1218 1219static bool validate_string(debug_report_data *report_data, const char *apiName, const char *stringName, 1220 const char *validateString) { 1221 assert(apiName != nullptr); 1222 assert(stringName != nullptr); 1223 assert(validateString != nullptr); 1224 1225 bool skipCall = false; 1226 1227 VkStringErrorFlags result = vk_string_validate(MaxParamCheckerStringLength, validateString); 1228 1229 if (result == VK_STRING_ERROR_NONE) { 1230 return skipCall; 1231 } else if (result & VK_STRING_ERROR_LENGTH) { 1232 skipCall = 1233 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 
INVALID_USAGE, 1234 "PARAMCHECK", "%s: string %s exceeds max length %d", apiName, stringName, MaxParamCheckerStringLength); 1235 } else if (result & VK_STRING_ERROR_BAD_DATA) { 1236 skipCall = 1237 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, INVALID_USAGE, 1238 "PARAMCHECK", "%s: string %s contains invalid characters or is badly formed", apiName, stringName); 1239 } 1240 return skipCall; 1241} 1242 1243static bool validate_queue_family_index(layer_data *device_data, const char *function_name, const char *parameter_name, 1244 uint32_t index) { 1245 assert(device_data != nullptr); 1246 debug_report_data *report_data = device_data->report_data; 1247 bool skip_call = false; 1248 1249 if (index == VK_QUEUE_FAMILY_IGNORED) { 1250 skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, 1251 "PARAMCHECK", "%s: %s cannot be VK_QUEUE_FAMILY_IGNORED.", function_name, parameter_name); 1252 } else { 1253 const auto &queue_data = device_data->queueFamilyIndexMap.find(index); 1254 if (queue_data == device_data->queueFamilyIndexMap.end()) { 1255 skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, 1256 "PARAMCHECK", "%s: %s (%d) must be one of the indices specified when the device was created, via " 1257 "the VkDeviceQueueCreateInfo structure.", 1258 function_name, parameter_name, index); 1259 return false; 1260 } 1261 } 1262 1263 return skip_call; 1264} 1265 1266static bool validate_queue_family_indices(layer_data *device_data, const char *function_name, const char *parameter_name, 1267 const uint32_t count, const uint32_t *indices) { 1268 assert(device_data != nullptr); 1269 debug_report_data *report_data = device_data->report_data; 1270 bool skip_call = false; 1271 1272 if (indices != nullptr) { 1273 for (uint32_t i = 0; i < count; i++) { 1274 if (indices[i] == VK_QUEUE_FAMILY_IGNORED) { 1275 skip_call 
|= 1276 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", 1277 "%s: %s[%d] cannot be VK_QUEUE_FAMILY_IGNORED.", function_name, parameter_name, i); 1278 } else { 1279 const auto &queue_data = device_data->queueFamilyIndexMap.find(indices[i]); 1280 if (queue_data == device_data->queueFamilyIndexMap.end()) { 1281 skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, 1282 "PARAMCHECK", "%s: %s[%d] (%d) must be one of the indices specified when the device was " 1283 "created, via the VkDeviceQueueCreateInfo structure.", 1284 function_name, parameter_name, i, indices[i]); 1285 return false; 1286 } 1287 } 1288 } 1289 } 1290 1291 return skip_call; 1292} 1293 1294VKAPI_ATTR VkResult VKAPI_CALL 1295CreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkInstance *pInstance) { 1296 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 1297 1298 VkLayerInstanceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO); 1299 assert(chain_info != nullptr); 1300 assert(chain_info->u.pLayerInfo != nullptr); 1301 1302 PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr; 1303 PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance)fpGetInstanceProcAddr(NULL, "vkCreateInstance"); 1304 if (fpCreateInstance == NULL) { 1305 return VK_ERROR_INITIALIZATION_FAILED; 1306 } 1307 1308 // Advance the link info for the next element on the chain 1309 chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext; 1310 1311 result = fpCreateInstance(pCreateInfo, pAllocator, pInstance); 1312 1313 if (result == VK_SUCCESS) { 1314 layer_data *my_instance_data = get_my_data_ptr(get_dispatch_key(*pInstance), layer_data_map); 1315 assert(my_instance_data != nullptr); 1316 1317 VkLayerInstanceDispatchTable *pTable = initInstanceTable(*pInstance, fpGetInstanceProcAddr, 
pc_instance_table_map); 1318 1319 my_instance_data->instance = *pInstance; 1320 my_instance_data->report_data = debug_report_create_instance(pTable, *pInstance, pCreateInfo->enabledExtensionCount, 1321 pCreateInfo->ppEnabledExtensionNames); 1322 1323 // Look for one or more debug report create info structures 1324 // and setup a callback(s) for each one found. 1325 if (!layer_copy_tmp_callbacks(pCreateInfo->pNext, &my_instance_data->num_tmp_callbacks, 1326 &my_instance_data->tmp_dbg_create_infos, &my_instance_data->tmp_callbacks)) { 1327 if (my_instance_data->num_tmp_callbacks > 0) { 1328 // Setup the temporary callback(s) here to catch early issues: 1329 if (layer_enable_tmp_callbacks(my_instance_data->report_data, my_instance_data->num_tmp_callbacks, 1330 my_instance_data->tmp_dbg_create_infos, my_instance_data->tmp_callbacks)) { 1331 // Failure of setting up one or more of the callback. 1332 // Therefore, clean up and don't use those callbacks: 1333 layer_free_tmp_callbacks(my_instance_data->tmp_dbg_create_infos, my_instance_data->tmp_callbacks); 1334 my_instance_data->num_tmp_callbacks = 0; 1335 } 1336 } 1337 } 1338 1339 init_parameter_validation(my_instance_data, pAllocator); 1340 1341 // Ordinarily we'd check these before calling down the chain, but none of the layer 1342 // support is in place until now, if we survive we can report the issue now. 
1343 parameter_validation_vkCreateInstance(my_instance_data->report_data, pCreateInfo, pAllocator, pInstance); 1344 1345 if (pCreateInfo->pApplicationInfo) { 1346 if (pCreateInfo->pApplicationInfo->pApplicationName) { 1347 validate_string(my_instance_data->report_data, "vkCreateInstance", 1348 "pCreateInfo->VkApplicationInfo->pApplicationName", 1349 pCreateInfo->pApplicationInfo->pApplicationName); 1350 } 1351 1352 if (pCreateInfo->pApplicationInfo->pEngineName) { 1353 validate_string(my_instance_data->report_data, "vkCreateInstance", "pCreateInfo->VkApplicationInfo->pEngineName", 1354 pCreateInfo->pApplicationInfo->pEngineName); 1355 } 1356 } 1357 1358 // Disable the tmp callbacks: 1359 if (my_instance_data->num_tmp_callbacks > 0) { 1360 layer_disable_tmp_callbacks(my_instance_data->report_data, my_instance_data->num_tmp_callbacks, 1361 my_instance_data->tmp_callbacks); 1362 } 1363 } 1364 1365 return result; 1366} 1367 1368VKAPI_ATTR void VKAPI_CALL DestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator) { 1369 // Grab the key before the instance is destroyed. 
    dispatch_key key = get_dispatch_key(instance);
    bool skipCall = false;
    layer_data *my_data = get_my_data_ptr(key, layer_data_map);
    assert(my_data != NULL);

    // Enable the temporary callback(s) here to catch vkDestroyInstance issues:
    bool callback_setup = false;
    if (my_data->num_tmp_callbacks > 0) {
        if (!layer_enable_tmp_callbacks(my_data->report_data, my_data->num_tmp_callbacks, my_data->tmp_dbg_create_infos,
                                        my_data->tmp_callbacks)) {
            callback_setup = true;
        }
    }

    skipCall |= parameter_validation_vkDestroyInstance(my_data->report_data, pAllocator);

    // Disable and cleanup the temporary callback(s):
    if (callback_setup) {
        layer_disable_tmp_callbacks(my_data->report_data, my_data->num_tmp_callbacks, my_data->tmp_callbacks);
    }
    if (my_data->num_tmp_callbacks > 0) {
        layer_free_tmp_callbacks(my_data->tmp_dbg_create_infos, my_data->tmp_callbacks);
        my_data->num_tmp_callbacks = 0;
    }

    if (!skipCall) {
        // Forward the destroy down the chain, then tear down this layer's
        // per-instance state (logging callbacks, report data, table entries).
        VkLayerInstanceDispatchTable *pTable = get_dispatch_table(pc_instance_table_map, instance);
        pTable->DestroyInstance(instance, pAllocator);

        // Clean up logging callback, if any
        while (my_data->logging_callback.size() > 0) {
            VkDebugReportCallbackEXT callback = my_data->logging_callback.back();
            layer_destroy_msg_callback(my_data->report_data, callback, pAllocator);
            my_data->logging_callback.pop_back();
        }

        layer_debug_report_destroy_instance(mid(instance));
        // NOTE(review): erasing with pTable (a dispatch-table pointer) looks
        // like the wrong key type for layer_data_map, whose entries are keyed
        // by dispatch_key; layer_data_map.erase(key) below already removes this
        // instance's entry.  Confirm whether this line is intentional.
        layer_data_map.erase(pTable);

        pc_instance_table_map.erase(key);
        layer_data_map.erase(key);
    }
}

// Layer entry point for vkEnumeratePhysicalDevices: runs the generated
// parameter checks, then forwards the call down the chain.
VKAPI_ATTR VkResult VKAPI_CALL
EnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount, VkPhysicalDevice *pPhysicalDevices) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
    bool skipCall = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
assert(my_data != NULL); 1420 1421 skipCall |= parameter_validation_vkEnumeratePhysicalDevices(my_data->report_data, pPhysicalDeviceCount, pPhysicalDevices); 1422 1423 if (!skipCall) { 1424 result = get_dispatch_table(pc_instance_table_map, instance) 1425 ->EnumeratePhysicalDevices(instance, pPhysicalDeviceCount, pPhysicalDevices); 1426 1427 validate_result(my_data->report_data, "vkEnumeratePhysicalDevices", result); 1428 } 1429 1430 return result; 1431} 1432 1433VKAPI_ATTR void VKAPI_CALL 1434GetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures *pFeatures) { 1435 bool skipCall = false; 1436 layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map); 1437 assert(my_data != NULL); 1438 1439 skipCall |= parameter_validation_vkGetPhysicalDeviceFeatures(my_data->report_data, pFeatures); 1440 1441 if (!skipCall) { 1442 get_dispatch_table(pc_instance_table_map, physicalDevice)->GetPhysicalDeviceFeatures(physicalDevice, pFeatures); 1443 } 1444} 1445 1446VKAPI_ATTR void VKAPI_CALL 1447GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties *pFormatProperties) { 1448 bool skipCall = false; 1449 layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map); 1450 assert(my_data != NULL); 1451 1452 skipCall |= parameter_validation_vkGetPhysicalDeviceFormatProperties(my_data->report_data, format, pFormatProperties); 1453 1454 if (!skipCall) { 1455 get_dispatch_table(pc_instance_table_map, physicalDevice) 1456 ->GetPhysicalDeviceFormatProperties(physicalDevice, format, pFormatProperties); 1457 } 1458} 1459 1460VKAPI_ATTR VkResult VKAPI_CALL 1461GetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, 1462 VkImageUsageFlags usage, VkImageCreateFlags flags, 1463 VkImageFormatProperties *pImageFormatProperties) { 1464 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 1465 bool 
skipCall = false; 1466 layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map); 1467 assert(my_data != NULL); 1468 1469 skipCall |= parameter_validation_vkGetPhysicalDeviceImageFormatProperties(my_data->report_data, format, type, tiling, usage, flags, 1470 pImageFormatProperties); 1471 1472 if (!skipCall) { 1473 result = get_dispatch_table(pc_instance_table_map, physicalDevice) 1474 ->GetPhysicalDeviceImageFormatProperties(physicalDevice, format, type, tiling, usage, flags, 1475 pImageFormatProperties); 1476 1477 validate_result(my_data->report_data, "vkGetPhysicalDeviceImageFormatProperties", result); 1478 } 1479 1480 return result; 1481} 1482 1483VKAPI_ATTR void VKAPI_CALL 1484GetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties *pProperties) { 1485 bool skipCall = false; 1486 layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map); 1487 assert(my_data != NULL); 1488 1489 skipCall |= parameter_validation_vkGetPhysicalDeviceProperties(my_data->report_data, pProperties); 1490 1491 if (!skipCall) { 1492 get_dispatch_table(pc_instance_table_map, physicalDevice)->GetPhysicalDeviceProperties(physicalDevice, pProperties); 1493 } 1494} 1495 1496VKAPI_ATTR void VKAPI_CALL 1497GetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, 1498 VkQueueFamilyProperties *pQueueFamilyProperties) { 1499 bool skipCall = false; 1500 layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map); 1501 assert(my_data != NULL); 1502 1503 skipCall |= parameter_validation_vkGetPhysicalDeviceQueueFamilyProperties(my_data->report_data, pQueueFamilyPropertyCount, 1504 pQueueFamilyProperties); 1505 1506 if (!skipCall) { 1507 get_dispatch_table(pc_instance_table_map, physicalDevice) 1508 ->GetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties); 1509 } 1510} 1511 
1512VKAPI_ATTR void VKAPI_CALL 1513GetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties *pMemoryProperties) { 1514 bool skipCall = false; 1515 layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map); 1516 assert(my_data != NULL); 1517 1518 skipCall |= parameter_validation_vkGetPhysicalDeviceMemoryProperties(my_data->report_data, pMemoryProperties); 1519 1520 if (!skipCall) { 1521 get_dispatch_table(pc_instance_table_map, physicalDevice) 1522 ->GetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties); 1523 } 1524} 1525 1526void validateDeviceCreateInfo(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo, 1527 const std::vector<VkQueueFamilyProperties> properties) { 1528 std::unordered_set<uint32_t> set; 1529 1530 if ((pCreateInfo != nullptr) && (pCreateInfo->pQueueCreateInfos != nullptr)) { 1531 for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) { 1532 if (set.count(pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex)) { 1533 log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 1534 INVALID_USAGE, "PARAMCHECK", 1535 "VkDeviceCreateInfo parameter, uint32_t pQueueCreateInfos[%d]->queueFamilyIndex, is not unique within this " 1536 "structure.", 1537 i); 1538 } else { 1539 set.insert(pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex); 1540 } 1541 1542 if (pCreateInfo->pQueueCreateInfos[i].pQueuePriorities != nullptr) { 1543 for (uint32_t j = 0; j < pCreateInfo->pQueueCreateInfos[i].queueCount; ++j) { 1544 if ((pCreateInfo->pQueueCreateInfos[i].pQueuePriorities[j] < 0.f) || 1545 (pCreateInfo->pQueueCreateInfos[i].pQueuePriorities[j] > 1.f)) { 1546 log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, 1547 __LINE__, INVALID_USAGE, "PARAMCHECK", 1548 "VkDeviceCreateInfo parameter, uint32_t pQueueCreateInfos[%d]->pQueuePriorities[%d], must be 
" 1549 "between 0 and 1. Actual value is %f", 1550 i, j, pCreateInfo->pQueueCreateInfos[i].pQueuePriorities[j]); 1551 } 1552 } 1553 } 1554 1555 if (pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex >= properties.size()) { 1556 log_msg( 1557 mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 1558 INVALID_USAGE, "PARAMCHECK", 1559 "VkDeviceCreateInfo parameter, uint32_t pQueueCreateInfos[%d]->queueFamilyIndex cannot be more than the number " 1560 "of queue families.", 1561 i); 1562 } else if (pCreateInfo->pQueueCreateInfos[i].queueCount > 1563 properties[pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex].queueCount) { 1564 log_msg( 1565 mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 1566 INVALID_USAGE, "PARAMCHECK", 1567 "VkDeviceCreateInfo parameter, uint32_t pQueueCreateInfos[%d]->queueCount cannot be more than the number of " 1568 "queues for the given family index.", 1569 i); 1570 } 1571 } 1572 } 1573} 1574 1575void storeCreateDeviceData(VkDevice device, const VkDeviceCreateInfo *pCreateInfo) { 1576 layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1577 1578 if ((pCreateInfo != nullptr) && (pCreateInfo->pQueueCreateInfos != nullptr)) { 1579 for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) { 1580 my_device_data->queueFamilyIndexMap.insert( 1581 std::make_pair(pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex, pCreateInfo->pQueueCreateInfos[i].queueCount)); 1582 } 1583 } 1584} 1585 1586VKAPI_ATTR VkResult VKAPI_CALL CreateDevice(VkPhysicalDevice physicalDevice, 1587 const VkDeviceCreateInfo *pCreateInfo, 1588 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) { 1589 /* 1590 * NOTE: We do not validate physicalDevice or any dispatchable 1591 * object as the first parameter. We couldn't get here if it was wrong! 
1592 */ 1593 1594 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 1595 bool skipCall = false; 1596 layer_data *my_instance_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map); 1597 assert(my_instance_data != nullptr); 1598 1599 skipCall |= parameter_validation_vkCreateDevice(my_instance_data->report_data, pCreateInfo, pAllocator, pDevice); 1600 1601 if (pCreateInfo != NULL) { 1602 if ((pCreateInfo->enabledLayerCount > 0) && (pCreateInfo->ppEnabledLayerNames != NULL)) { 1603 for (size_t i = 0; i < pCreateInfo->enabledLayerCount; i++) { 1604 skipCall |= validate_string(my_instance_data->report_data, "vkCreateDevice", "pCreateInfo->ppEnabledLayerNames", 1605 pCreateInfo->ppEnabledLayerNames[i]); 1606 } 1607 } 1608 1609 if ((pCreateInfo->enabledExtensionCount > 0) && (pCreateInfo->ppEnabledExtensionNames != NULL)) { 1610 for (size_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) { 1611 skipCall |= validate_string(my_instance_data->report_data, "vkCreateDevice", "pCreateInfo->ppEnabledExtensionNames", 1612 pCreateInfo->ppEnabledExtensionNames[i]); 1613 } 1614 } 1615 } 1616 1617 if (!skipCall) { 1618 VkLayerDeviceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO); 1619 assert(chain_info != nullptr); 1620 assert(chain_info->u.pLayerInfo != nullptr); 1621 1622 PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr; 1623 PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr = chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr; 1624 PFN_vkCreateDevice fpCreateDevice = (PFN_vkCreateDevice)fpGetInstanceProcAddr(my_instance_data->instance, "vkCreateDevice"); 1625 if (fpCreateDevice == NULL) { 1626 return VK_ERROR_INITIALIZATION_FAILED; 1627 } 1628 1629 // Advance the link info for the next element on the chain 1630 chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext; 1631 1632 result = fpCreateDevice(physicalDevice, pCreateInfo, pAllocator, pDevice); 1633 1634 
validate_result(my_instance_data->report_data, "vkCreateDevice", result); 1635 1636 if (result == VK_SUCCESS) { 1637 layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(*pDevice), layer_data_map); 1638 assert(my_device_data != nullptr); 1639 1640 my_device_data->report_data = layer_debug_report_create_device(my_instance_data->report_data, *pDevice); 1641 initDeviceTable(*pDevice, fpGetDeviceProcAddr, pc_device_table_map); 1642 1643 uint32_t count; 1644 get_dispatch_table(pc_instance_table_map, physicalDevice) 1645 ->GetPhysicalDeviceQueueFamilyProperties(physicalDevice, &count, nullptr); 1646 std::vector<VkQueueFamilyProperties> properties(count); 1647 get_dispatch_table(pc_instance_table_map, physicalDevice) 1648 ->GetPhysicalDeviceQueueFamilyProperties(physicalDevice, &count, &properties[0]); 1649 1650 validateDeviceCreateInfo(physicalDevice, pCreateInfo, properties); 1651 storeCreateDeviceData(*pDevice, pCreateInfo); 1652 } 1653 } 1654 1655 return result; 1656} 1657 1658VKAPI_ATTR void VKAPI_CALL DestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) { 1659 dispatch_key key = get_dispatch_key(device); 1660 bool skipCall = false; 1661 layer_data *my_data = get_my_data_ptr(key, layer_data_map); 1662 assert(my_data != NULL); 1663 1664 skipCall |= parameter_validation_vkDestroyDevice(my_data->report_data, pAllocator); 1665 1666 if (!skipCall) { 1667 layer_debug_report_destroy_device(device); 1668 1669#if DISPATCH_MAP_DEBUG 1670 fprintf(stderr, "Device: 0x%p, key: 0x%p\n", device, key); 1671#endif 1672 1673 get_dispatch_table(pc_device_table_map, device)->DestroyDevice(device, pAllocator); 1674 pc_device_table_map.erase(key); 1675 layer_data_map.erase(key); 1676 } 1677} 1678 1679bool PreGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex) { 1680 layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1681 assert(my_device_data != nullptr); 1682 1683 
validate_queue_family_index(my_device_data, "vkGetDeviceQueue", "queueFamilyIndex", queueFamilyIndex); 1684 1685 const auto &queue_data = my_device_data->queueFamilyIndexMap.find(queueFamilyIndex); 1686 if (queue_data->second <= queueIndex) { 1687 log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, INVALID_USAGE, 1688 "PARAMCHECK", 1689 "VkGetDeviceQueue parameter, uint32_t queueIndex %d, must be less than the number of queues given when the device " 1690 "was created.", 1691 queueIndex); 1692 return false; 1693 } 1694 1695 return true; 1696} 1697 1698VKAPI_ATTR void VKAPI_CALL 1699GetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue *pQueue) { 1700 bool skipCall = false; 1701 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1702 assert(my_data != NULL); 1703 1704 skipCall |= parameter_validation_vkGetDeviceQueue(my_data->report_data, queueFamilyIndex, queueIndex, pQueue); 1705 1706 if (!skipCall) { 1707 PreGetDeviceQueue(device, queueFamilyIndex, queueIndex); 1708 1709 get_dispatch_table(pc_device_table_map, device)->GetDeviceQueue(device, queueFamilyIndex, queueIndex, pQueue); 1710 } 1711} 1712 1713VKAPI_ATTR VkResult VKAPI_CALL 1714QueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence) { 1715 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 1716 bool skipCall = false; 1717 layer_data *my_data = get_my_data_ptr(get_dispatch_key(queue), layer_data_map); 1718 assert(my_data != NULL); 1719 1720 skipCall |= parameter_validation_vkQueueSubmit(my_data->report_data, submitCount, pSubmits, fence); 1721 1722 if (!skipCall) { 1723 result = get_dispatch_table(pc_device_table_map, queue)->QueueSubmit(queue, submitCount, pSubmits, fence); 1724 1725 validate_result(my_data->report_data, "vkQueueSubmit", result); 1726 } 1727 1728 return result; 1729} 1730 1731VKAPI_ATTR VkResult VKAPI_CALL QueueWaitIdle(VkQueue queue) { 
1732 layer_data *my_data = get_my_data_ptr(get_dispatch_key(queue), layer_data_map); 1733 assert(my_data != NULL); 1734 1735 VkResult result = get_dispatch_table(pc_device_table_map, queue)->QueueWaitIdle(queue); 1736 1737 validate_result(my_data->report_data, "vkQueueWaitIdle", result); 1738 1739 return result; 1740} 1741 1742VKAPI_ATTR VkResult VKAPI_CALL DeviceWaitIdle(VkDevice device) { 1743 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1744 assert(my_data != NULL); 1745 1746 VkResult result = get_dispatch_table(pc_device_table_map, device)->DeviceWaitIdle(device); 1747 1748 validate_result(my_data->report_data, "vkDeviceWaitIdle", result); 1749 1750 return result; 1751} 1752 1753VKAPI_ATTR VkResult VKAPI_CALL AllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo, 1754 const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory) { 1755 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 1756 bool skipCall = false; 1757 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1758 assert(my_data != NULL); 1759 1760 skipCall |= parameter_validation_vkAllocateMemory(my_data->report_data, pAllocateInfo, pAllocator, pMemory); 1761 1762 if (!skipCall) { 1763 result = get_dispatch_table(pc_device_table_map, device)->AllocateMemory(device, pAllocateInfo, pAllocator, pMemory); 1764 1765 validate_result(my_data->report_data, "vkAllocateMemory", result); 1766 } 1767 1768 return result; 1769} 1770 1771VKAPI_ATTR void VKAPI_CALL 1772FreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks *pAllocator) { 1773 bool skipCall = false; 1774 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1775 assert(my_data != NULL); 1776 1777 skipCall |= parameter_validation_vkFreeMemory(my_data->report_data, memory, pAllocator); 1778 1779 if (!skipCall) { 1780 get_dispatch_table(pc_device_table_map, device)->FreeMemory(device, memory, pAllocator); 1781 } 
1782} 1783 1784VKAPI_ATTR VkResult VKAPI_CALL 1785MapMemory(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void **ppData) { 1786 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 1787 bool skipCall = false; 1788 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1789 assert(my_data != NULL); 1790 1791 skipCall |= parameter_validation_vkMapMemory(my_data->report_data, memory, offset, size, flags, ppData); 1792 1793 if (!skipCall) { 1794 result = get_dispatch_table(pc_device_table_map, device)->MapMemory(device, memory, offset, size, flags, ppData); 1795 1796 validate_result(my_data->report_data, "vkMapMemory", result); 1797 } 1798 1799 return result; 1800} 1801 1802VKAPI_ATTR void VKAPI_CALL UnmapMemory(VkDevice device, VkDeviceMemory memory) { 1803 bool skipCall = false; 1804 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1805 assert(my_data != NULL); 1806 1807 skipCall |= parameter_validation_vkUnmapMemory(my_data->report_data, memory); 1808 1809 if (!skipCall) { 1810 get_dispatch_table(pc_device_table_map, device)->UnmapMemory(device, memory); 1811 } 1812} 1813 1814VKAPI_ATTR VkResult VKAPI_CALL 1815FlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange *pMemoryRanges) { 1816 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 1817 bool skipCall = false; 1818 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1819 assert(my_data != NULL); 1820 1821 skipCall |= parameter_validation_vkFlushMappedMemoryRanges(my_data->report_data, memoryRangeCount, pMemoryRanges); 1822 1823 if (!skipCall) { 1824 result = get_dispatch_table(pc_device_table_map, device)->FlushMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges); 1825 1826 validate_result(my_data->report_data, "vkFlushMappedMemoryRanges", result); 1827 } 1828 1829 return result; 1830} 1831 1832VKAPI_ATTR VkResult VKAPI_CALL 
1833InvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange *pMemoryRanges) { 1834 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 1835 bool skipCall = false; 1836 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1837 assert(my_data != NULL); 1838 1839 skipCall |= parameter_validation_vkInvalidateMappedMemoryRanges(my_data->report_data, memoryRangeCount, pMemoryRanges); 1840 1841 if (!skipCall) { 1842 result = 1843 get_dispatch_table(pc_device_table_map, device)->InvalidateMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges); 1844 1845 validate_result(my_data->report_data, "vkInvalidateMappedMemoryRanges", result); 1846 } 1847 1848 return result; 1849} 1850 1851VKAPI_ATTR void VKAPI_CALL 1852GetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory, VkDeviceSize *pCommittedMemoryInBytes) { 1853 bool skipCall = false; 1854 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1855 assert(my_data != NULL); 1856 1857 skipCall |= parameter_validation_vkGetDeviceMemoryCommitment(my_data->report_data, memory, pCommittedMemoryInBytes); 1858 1859 if (!skipCall) { 1860 get_dispatch_table(pc_device_table_map, device)->GetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes); 1861 } 1862} 1863 1864VKAPI_ATTR VkResult VKAPI_CALL BindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, 1865 VkDeviceSize memoryOffset) { 1866 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 1867 bool skipCall = false; 1868 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1869 assert(my_data != NULL); 1870 1871 skipCall |= parameter_validation_vkBindBufferMemory(my_data->report_data, buffer, memory, memoryOffset); 1872 1873 if (!skipCall) { 1874 result = get_dispatch_table(pc_device_table_map, device)->BindBufferMemory(device, buffer, memory, memoryOffset); 1875 1876 validate_result(my_data->report_data, 
"vkBindBufferMemory", result); 1877 } 1878 1879 return result; 1880} 1881 1882VKAPI_ATTR VkResult VKAPI_CALL BindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory, 1883 VkDeviceSize memoryOffset) { 1884 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 1885 bool skipCall = false; 1886 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1887 assert(my_data != NULL); 1888 1889 skipCall |= parameter_validation_vkBindImageMemory(my_data->report_data, image, memory, memoryOffset); 1890 1891 if (!skipCall) { 1892 result = get_dispatch_table(pc_device_table_map, device)->BindImageMemory(device, image, memory, memoryOffset); 1893 1894 validate_result(my_data->report_data, "vkBindImageMemory", result); 1895 } 1896 1897 return result; 1898} 1899 1900VKAPI_ATTR void VKAPI_CALL 1901GetBufferMemoryRequirements(VkDevice device, VkBuffer buffer, VkMemoryRequirements *pMemoryRequirements) { 1902 bool skipCall = false; 1903 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1904 assert(my_data != NULL); 1905 1906 skipCall |= parameter_validation_vkGetBufferMemoryRequirements(my_data->report_data, buffer, pMemoryRequirements); 1907 1908 if (!skipCall) { 1909 get_dispatch_table(pc_device_table_map, device)->GetBufferMemoryRequirements(device, buffer, pMemoryRequirements); 1910 } 1911} 1912 1913VKAPI_ATTR void VKAPI_CALL 1914GetImageMemoryRequirements(VkDevice device, VkImage image, VkMemoryRequirements *pMemoryRequirements) { 1915 bool skipCall = false; 1916 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1917 assert(my_data != NULL); 1918 1919 skipCall |= parameter_validation_vkGetImageMemoryRequirements(my_data->report_data, image, pMemoryRequirements); 1920 1921 if (!skipCall) { 1922 get_dispatch_table(pc_device_table_map, device)->GetImageMemoryRequirements(device, image, pMemoryRequirements); 1923 } 1924} 1925 1926bool PostGetImageSparseMemoryRequirements(VkDevice device, 
VkImage image, uint32_t *pNumRequirements, 1927 VkSparseImageMemoryRequirements *pSparseMemoryRequirements) { 1928 if (pSparseMemoryRequirements != nullptr) { 1929 if ((pSparseMemoryRequirements->formatProperties.aspectMask & 1930 (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT | 1931 VK_IMAGE_ASPECT_METADATA_BIT)) == 0) { 1932 log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 1933 UNRECOGNIZED_VALUE, "PARAMCHECK", 1934 "vkGetImageSparseMemoryRequirements parameter, VkImageAspect " 1935 "pSparseMemoryRequirements->formatProperties.aspectMask, is an unrecognized enumerator"); 1936 return false; 1937 } 1938 } 1939 1940 return true; 1941} 1942 1943VKAPI_ATTR void VKAPI_CALL 1944GetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount, 1945 VkSparseImageMemoryRequirements *pSparseMemoryRequirements) { 1946 bool skipCall = false; 1947 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1948 assert(my_data != NULL); 1949 1950 skipCall |= parameter_validation_vkGetImageSparseMemoryRequirements(my_data->report_data, image, pSparseMemoryRequirementCount, 1951 pSparseMemoryRequirements); 1952 1953 if (!skipCall) { 1954 get_dispatch_table(pc_device_table_map, device) 1955 ->GetImageSparseMemoryRequirements(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements); 1956 1957 PostGetImageSparseMemoryRequirements(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements); 1958 } 1959} 1960 1961bool PostGetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, 1962 VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, 1963 uint32_t *pNumProperties, VkSparseImageFormatProperties *pProperties) { 1964 if (pProperties != nullptr) { 1965 if ((pProperties->aspectMask & (VK_IMAGE_ASPECT_COLOR_BIT | 
VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT | 1966 VK_IMAGE_ASPECT_METADATA_BIT)) == 0) { 1967 log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 1, 1968 "PARAMCHECK", 1969 "vkGetPhysicalDeviceSparseImageFormatProperties parameter, VkImageAspect pProperties->aspectMask, is an " 1970 "unrecognized enumerator"); 1971 return false; 1972 } 1973 } 1974 1975 return true; 1976} 1977 1978VKAPI_ATTR void VKAPI_CALL 1979GetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, 1980 VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, 1981 uint32_t *pPropertyCount, VkSparseImageFormatProperties *pProperties) { 1982 bool skipCall = false; 1983 layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map); 1984 assert(my_data != NULL); 1985 1986 skipCall |= parameter_validation_vkGetPhysicalDeviceSparseImageFormatProperties(my_data->report_data, format, type, samples, usage, 1987 tiling, pPropertyCount, pProperties); 1988 1989 if (!skipCall) { 1990 get_dispatch_table(pc_instance_table_map, physicalDevice) 1991 ->GetPhysicalDeviceSparseImageFormatProperties(physicalDevice, format, type, samples, usage, tiling, pPropertyCount, 1992 pProperties); 1993 1994 PostGetPhysicalDeviceSparseImageFormatProperties(physicalDevice, format, type, samples, usage, tiling, pPropertyCount, 1995 pProperties); 1996 } 1997} 1998 1999VKAPI_ATTR VkResult VKAPI_CALL 2000QueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo, VkFence fence) { 2001 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2002 bool skipCall = false; 2003 layer_data *my_data = get_my_data_ptr(get_dispatch_key(queue), layer_data_map); 2004 assert(my_data != NULL); 2005 2006 skipCall |= parameter_validation_vkQueueBindSparse(my_data->report_data, bindInfoCount, pBindInfo, fence); 2007 2008 if (!skipCall) { 2009 result = 
get_dispatch_table(pc_device_table_map, queue)->QueueBindSparse(queue, bindInfoCount, pBindInfo, fence); 2010 2011 validate_result(my_data->report_data, "vkQueueBindSparse", result); 2012 } 2013 2014 return result; 2015} 2016 2017VKAPI_ATTR VkResult VKAPI_CALL 2018CreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkFence *pFence) { 2019 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2020 bool skipCall = false; 2021 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2022 assert(my_data != NULL); 2023 2024 skipCall |= parameter_validation_vkCreateFence(my_data->report_data, pCreateInfo, pAllocator, pFence); 2025 2026 if (!skipCall) { 2027 result = get_dispatch_table(pc_device_table_map, device)->CreateFence(device, pCreateInfo, pAllocator, pFence); 2028 2029 validate_result(my_data->report_data, "vkCreateFence", result); 2030 } 2031 2032 return result; 2033} 2034 2035VKAPI_ATTR void VKAPI_CALL DestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) { 2036 bool skipCall = false; 2037 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2038 assert(my_data != NULL); 2039 2040 skipCall |= parameter_validation_vkDestroyFence(my_data->report_data, fence, pAllocator); 2041 2042 if (!skipCall) { 2043 get_dispatch_table(pc_device_table_map, device)->DestroyFence(device, fence, pAllocator); 2044 } 2045} 2046 2047VKAPI_ATTR VkResult VKAPI_CALL ResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences) { 2048 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2049 bool skipCall = false; 2050 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2051 assert(my_data != NULL); 2052 2053 skipCall |= parameter_validation_vkResetFences(my_data->report_data, fenceCount, pFences); 2054 2055 if (!skipCall) { 2056 result = get_dispatch_table(pc_device_table_map, device)->ResetFences(device, 
fenceCount, pFences); 2057 2058 validate_result(my_data->report_data, "vkResetFences", result); 2059 } 2060 2061 return result; 2062} 2063 2064VKAPI_ATTR VkResult VKAPI_CALL GetFenceStatus(VkDevice device, VkFence fence) { 2065 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2066 bool skipCall = false; 2067 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2068 assert(my_data != NULL); 2069 2070 skipCall |= parameter_validation_vkGetFenceStatus(my_data->report_data, fence); 2071 2072 if (!skipCall) { 2073 result = get_dispatch_table(pc_device_table_map, device)->GetFenceStatus(device, fence); 2074 2075 validate_result(my_data->report_data, "vkGetFenceStatus", result); 2076 } 2077 2078 return result; 2079} 2080 2081VKAPI_ATTR VkResult VKAPI_CALL 2082WaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences, VkBool32 waitAll, uint64_t timeout) { 2083 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2084 bool skipCall = false; 2085 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2086 assert(my_data != NULL); 2087 2088 skipCall |= parameter_validation_vkWaitForFences(my_data->report_data, fenceCount, pFences, waitAll, timeout); 2089 2090 if (!skipCall) { 2091 result = get_dispatch_table(pc_device_table_map, device)->WaitForFences(device, fenceCount, pFences, waitAll, timeout); 2092 2093 validate_result(my_data->report_data, "vkWaitForFences", result); 2094 } 2095 2096 return result; 2097} 2098 2099VKAPI_ATTR VkResult VKAPI_CALL CreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo, 2100 const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore) { 2101 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2102 bool skipCall = false; 2103 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2104 assert(my_data != NULL); 2105 2106 skipCall |= parameter_validation_vkCreateSemaphore(my_data->report_data, pCreateInfo, pAllocator, pSemaphore); 
2107 2108 if (!skipCall) { 2109 result = get_dispatch_table(pc_device_table_map, device)->CreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore); 2110 2111 validate_result(my_data->report_data, "vkCreateSemaphore", result); 2112 } 2113 2114 return result; 2115} 2116 2117VKAPI_ATTR void VKAPI_CALL 2118DestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks *pAllocator) { 2119 bool skipCall = false; 2120 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2121 assert(my_data != NULL); 2122 2123 skipCall |= parameter_validation_vkDestroySemaphore(my_data->report_data, semaphore, pAllocator); 2124 2125 if (!skipCall) { 2126 get_dispatch_table(pc_device_table_map, device)->DestroySemaphore(device, semaphore, pAllocator); 2127 } 2128} 2129 2130VKAPI_ATTR VkResult VKAPI_CALL 2131CreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkEvent *pEvent) { 2132 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2133 bool skipCall = false; 2134 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2135 assert(my_data != NULL); 2136 2137 skipCall |= parameter_validation_vkCreateEvent(my_data->report_data, pCreateInfo, pAllocator, pEvent); 2138 2139 if (!skipCall) { 2140 result = get_dispatch_table(pc_device_table_map, device)->CreateEvent(device, pCreateInfo, pAllocator, pEvent); 2141 2142 validate_result(my_data->report_data, "vkCreateEvent", result); 2143 } 2144 2145 return result; 2146} 2147 2148VKAPI_ATTR void VKAPI_CALL DestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) { 2149 bool skipCall = false; 2150 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2151 assert(my_data != NULL); 2152 2153 skipCall |= parameter_validation_vkDestroyEvent(my_data->report_data, event, pAllocator); 2154 2155 if (!skipCall) { 2156 get_dispatch_table(pc_device_table_map, 
device)->DestroyEvent(device, event, pAllocator); 2157 } 2158} 2159 2160VKAPI_ATTR VkResult VKAPI_CALL GetEventStatus(VkDevice device, VkEvent event) { 2161 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2162 bool skipCall = false; 2163 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2164 assert(my_data != NULL); 2165 2166 skipCall |= parameter_validation_vkGetEventStatus(my_data->report_data, event); 2167 2168 if (!skipCall) { 2169 result = get_dispatch_table(pc_device_table_map, device)->GetEventStatus(device, event); 2170 2171 validate_result(my_data->report_data, "vkGetEventStatus", result); 2172 } 2173 2174 return result; 2175} 2176 2177VKAPI_ATTR VkResult VKAPI_CALL SetEvent(VkDevice device, VkEvent event) { 2178 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2179 bool skipCall = false; 2180 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2181 assert(my_data != NULL); 2182 2183 skipCall |= parameter_validation_vkSetEvent(my_data->report_data, event); 2184 2185 if (!skipCall) { 2186 result = get_dispatch_table(pc_device_table_map, device)->SetEvent(device, event); 2187 2188 validate_result(my_data->report_data, "vkSetEvent", result); 2189 } 2190 2191 return result; 2192} 2193 2194VKAPI_ATTR VkResult VKAPI_CALL ResetEvent(VkDevice device, VkEvent event) { 2195 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2196 bool skipCall = false; 2197 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2198 assert(my_data != NULL); 2199 2200 skipCall |= parameter_validation_vkResetEvent(my_data->report_data, event); 2201 2202 if (!skipCall) { 2203 result = get_dispatch_table(pc_device_table_map, device)->ResetEvent(device, event); 2204 2205 validate_result(my_data->report_data, "vkResetEvent", result); 2206 } 2207 2208 return result; 2209} 2210 2211VKAPI_ATTR VkResult VKAPI_CALL CreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo, 2212 const 
VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool) { 2213 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2214 bool skip_call = false; 2215 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2216 assert(device_data != nullptr); 2217 debug_report_data *report_data = device_data->report_data; 2218 2219 skip_call |= parameter_validation_vkCreateQueryPool(device_data->report_data, pCreateInfo, pAllocator, pQueryPool); 2220 2221 // Validation for parameters excluded from the generated validation code due to a 'noautovalidity' tag in vk.xml 2222 if (pCreateInfo != nullptr) { 2223 // If queryType is VK_QUERY_TYPE_PIPELINE_STATISTICS, pipelineStatistics must be a valid combination of 2224 // VkQueryPipelineStatisticFlagBits values 2225 if ((pCreateInfo->queryType == VK_QUERY_TYPE_PIPELINE_STATISTICS) && (pCreateInfo->pipelineStatistics != 0) && 2226 ((pCreateInfo->pipelineStatistics & (~AllVkQueryPipelineStatisticFlagBits)) != 0)) { 2227 skip_call |= 2228 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 2229 UNRECOGNIZED_VALUE, LayerName, "vkCreateQueryPool: if pCreateInfo->queryType is " 2230 "VK_QUERY_TYPE_PIPELINE_STATISTICS, pCreateInfo->pipelineStatistics must be " 2231 "a valid combination of VkQueryPipelineStatisticFlagBits values"); 2232 } 2233 } 2234 2235 if (!skip_call) { 2236 result = get_dispatch_table(pc_device_table_map, device)->CreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool); 2237 2238 validate_result(report_data, "vkCreateQueryPool", result); 2239 } 2240 2241 return result; 2242} 2243 2244VKAPI_ATTR void VKAPI_CALL 2245DestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks *pAllocator) { 2246 bool skipCall = false; 2247 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2248 assert(my_data != NULL); 2249 2250 skipCall |= parameter_validation_vkDestroyQueryPool(my_data->report_data, 
queryPool, pAllocator); 2251 2252 if (!skipCall) { 2253 get_dispatch_table(pc_device_table_map, device)->DestroyQueryPool(device, queryPool, pAllocator); 2254 } 2255} 2256 2257VKAPI_ATTR VkResult VKAPI_CALL GetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, 2258 uint32_t queryCount, size_t dataSize, void *pData, 2259 VkDeviceSize stride, VkQueryResultFlags flags) { 2260 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2261 bool skipCall = false; 2262 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2263 assert(my_data != NULL); 2264 2265 skipCall |= 2266 parameter_validation_vkGetQueryPoolResults(my_data->report_data, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags); 2267 2268 if (!skipCall) { 2269 result = get_dispatch_table(pc_device_table_map, device) 2270 ->GetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags); 2271 2272 validate_result(my_data->report_data, "vkGetQueryPoolResults", result); 2273 } 2274 2275 return result; 2276} 2277 2278VKAPI_ATTR VkResult VKAPI_CALL 2279CreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer) { 2280 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2281 bool skip_call = false; 2282 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2283 assert(device_data != nullptr); 2284 debug_report_data *report_data = device_data->report_data; 2285 2286 skip_call |= parameter_validation_vkCreateBuffer(report_data, pCreateInfo, pAllocator, pBuffer); 2287 2288 if (pCreateInfo != nullptr) { 2289 // Validation for parameters excluded from the generated validation code due to a 'noautovalidity' tag in vk.xml 2290 if (pCreateInfo->sharingMode == VK_SHARING_MODE_CONCURRENT) { 2291 // If sharingMode is VK_SHARING_MODE_CONCURRENT, queueFamilyIndexCount must be greater than 1 2292 if (pCreateInfo->queueFamilyIndexCount <= 1) 
{ 2293 skip_call |= 2294 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 2295 INVALID_USAGE, LayerName, "vkCreateBuffer: if pCreateInfo->sharingMode is VK_SHARING_MODE_CONCURRENT, " 2296 "pCreateInfo->queueFamilyIndexCount must be greater than 1"); 2297 } 2298 2299 // If sharingMode is VK_SHARING_MODE_CONCURRENT, pQueueFamilyIndices must be a pointer to an array of 2300 // queueFamilyIndexCount uint32_t values 2301 if (pCreateInfo->pQueueFamilyIndices == nullptr) { 2302 skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, 2303 __LINE__, REQUIRED_PARAMETER, LayerName, 2304 "vkCreateBuffer: if pCreateInfo->sharingMode is VK_SHARING_MODE_CONCURRENT, " 2305 "pCreateInfo->pQueueFamilyIndices must be a pointer to an array of " 2306 "pCreateInfo->queueFamilyIndexCount uint32_t values"); 2307 } 2308 2309 // Ensure that the queue family indices were specified at device creation 2310 skip_call |= validate_queue_family_indices(device_data, "vkCreateBuffer", "pCreateInfo->pQueueFamilyIndices", 2311 pCreateInfo->queueFamilyIndexCount, pCreateInfo->pQueueFamilyIndices); 2312 } 2313 } 2314 2315 if (!skip_call) { 2316 result = get_dispatch_table(pc_device_table_map, device)->CreateBuffer(device, pCreateInfo, pAllocator, pBuffer); 2317 2318 validate_result(report_data, "vkCreateBuffer", result); 2319 } 2320 2321 return result; 2322} 2323 2324VKAPI_ATTR void VKAPI_CALL 2325DestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) { 2326 bool skipCall = false; 2327 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2328 assert(my_data != NULL); 2329 2330 skipCall |= parameter_validation_vkDestroyBuffer(my_data->report_data, buffer, pAllocator); 2331 2332 if (!skipCall) { 2333 get_dispatch_table(pc_device_table_map, device)->DestroyBuffer(device, buffer, pAllocator); 2334 } 2335} 2336 2337VKAPI_ATTR VkResult 
VKAPI_CALL CreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo, 2338 const VkAllocationCallbacks *pAllocator, VkBufferView *pView) { 2339 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2340 bool skipCall = false; 2341 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2342 assert(my_data != NULL); 2343 2344 skipCall |= parameter_validation_vkCreateBufferView(my_data->report_data, pCreateInfo, pAllocator, pView); 2345 2346 if (!skipCall) { 2347 result = get_dispatch_table(pc_device_table_map, device)->CreateBufferView(device, pCreateInfo, pAllocator, pView); 2348 2349 validate_result(my_data->report_data, "vkCreateBufferView", result); 2350 } 2351 2352 return result; 2353} 2354 2355VKAPI_ATTR void VKAPI_CALL 2356DestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks *pAllocator) { 2357 bool skipCall = false; 2358 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2359 assert(my_data != NULL); 2360 2361 skipCall |= parameter_validation_vkDestroyBufferView(my_data->report_data, bufferView, pAllocator); 2362 2363 if (!skipCall) { 2364 get_dispatch_table(pc_device_table_map, device)->DestroyBufferView(device, bufferView, pAllocator); 2365 } 2366} 2367 2368VKAPI_ATTR VkResult VKAPI_CALL CreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo, 2369 const VkAllocationCallbacks *pAllocator, VkImage *pImage) { 2370 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2371 bool skip_call = false; 2372 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2373 assert(device_data != nullptr); 2374 debug_report_data *report_data = device_data->report_data; 2375 2376 skip_call |= parameter_validation_vkCreateImage(report_data, pCreateInfo, pAllocator, pImage); 2377 2378 if (pCreateInfo != nullptr) { 2379 // Validation for parameters excluded from the generated validation code due to a 'noautovalidity' tag in vk.xml 2380 if 
(pCreateInfo->sharingMode == VK_SHARING_MODE_CONCURRENT) { 2381 // If sharingMode is VK_SHARING_MODE_CONCURRENT, queueFamilyIndexCount must be greater than 1 2382 if (pCreateInfo->queueFamilyIndexCount <= 1) { 2383 skip_call |= 2384 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 2385 INVALID_USAGE, LayerName, "vkCreateImage: if pCreateInfo->sharingMode is VK_SHARING_MODE_CONCURRENT, " 2386 "pCreateInfo->queueFamilyIndexCount must be greater than 1"); 2387 } 2388 2389 // If sharingMode is VK_SHARING_MODE_CONCURRENT, pQueueFamilyIndices must be a pointer to an array of 2390 // queueFamilyIndexCount uint32_t values 2391 if (pCreateInfo->pQueueFamilyIndices == nullptr) { 2392 skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, 2393 __LINE__, REQUIRED_PARAMETER, LayerName, 2394 "vkCreateImage: if pCreateInfo->sharingMode is VK_SHARING_MODE_CONCURRENT, " 2395 "pCreateInfo->pQueueFamilyIndices must be a pointer to an array of " 2396 "pCreateInfo->queueFamilyIndexCount uint32_t values"); 2397 } 2398 2399 skip_call |= validate_queue_family_indices(device_data, "vkCreateImage", "pCreateInfo->pQueueFamilyIndices", 2400 pCreateInfo->queueFamilyIndexCount, pCreateInfo->pQueueFamilyIndices); 2401 } 2402 2403 // width, height, and depth members of extent must be greater than 0 2404 skip_call |= ValidateGreaterThan(report_data, "vkCreateImage", "pCreateInfo->extent.width", pCreateInfo->extent.width, 2405 0u); 2406 skip_call |= ValidateGreaterThan(report_data, "vkCreateImage", "pCreateInfo->extent.height", pCreateInfo->extent.height, 2407 0u); 2408 skip_call |= ValidateGreaterThan(report_data, "vkCreateImage", "pCreateInfo->extent.depth", pCreateInfo->extent.depth, 2409 0u); 2410 2411 // mipLevels must be greater than 0 2412 skip_call |= ValidateGreaterThan(report_data, "vkCreateImage", "pCreateInfo->mipLevels", pCreateInfo->mipLevels, 2413 0u); 2414 2415 // 
arrayLayers must be greater than 0 2416 skip_call |= ValidateGreaterThan(report_data, "vkCreateImage", "pCreateInfo->arrayLayers", pCreateInfo->arrayLayers, 2417 0u); 2418 2419 // If imageType is VK_IMAGE_TYPE_1D, both extent.height and extent.depth must be 1 2420 if ((pCreateInfo->imageType == VK_IMAGE_TYPE_1D) && (pCreateInfo->extent.height != 1) && (pCreateInfo->extent.depth != 1)) { 2421 skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, 2422 LayerName, "vkCreateImage: if pCreateInfo->imageType is VK_IMAGE_TYPE_1D, both " 2423 "pCreateInfo->extent.height and pCreateInfo->extent.depth must be 1"); 2424 } 2425 2426 if (pCreateInfo->imageType == VK_IMAGE_TYPE_2D) { 2427 // If imageType is VK_IMAGE_TYPE_2D and flags contains VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT, extent.width and 2428 // extent.height must be equal 2429 if ((pCreateInfo->flags & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT) && 2430 (pCreateInfo->extent.width != pCreateInfo->extent.height)) { 2431 skip_call |= 2432 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, 2433 LayerName, "vkCreateImage: if pCreateInfo->imageType is VK_IMAGE_TYPE_2D and " 2434 "pCreateInfo->flags contains VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT, " 2435 "pCreateInfo->extent.width and pCreateInfo->extent.height must be equal"); 2436 } 2437 2438 if (pCreateInfo->extent.depth != 1) { 2439 skip_call |= 2440 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, 2441 LayerName, 2442 "vkCreateImage: if pCreateInfo->imageType is VK_IMAGE_TYPE_2D, pCreateInfo->extent.depth must be 1"); 2443 } 2444 } 2445 2446 // mipLevels must be less than or equal to floor(log2(max(extent.width,extent.height,extent.depth)))+1 2447 uint32_t maxDim = std::max(std::max(pCreateInfo->extent.width, pCreateInfo->extent.height), pCreateInfo->extent.depth); 2448 if (pCreateInfo->mipLevels > (floor(log2(maxDim)) + 1)) { 
2449 skip_call |= log_msg( 2450 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, LayerName, 2451 "vkCreateImage: pCreateInfo->mipLevels must be less than or equal to " 2452 "floor(log2(max(pCreateInfo->extent.width, pCreateInfo->extent.height, pCreateInfo->extent.depth)))+1"); 2453 } 2454 2455 // If flags contains VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT or VK_IMAGE_CREATE_SPARSE_ALIASED_BIT, it must also contain 2456 // VK_IMAGE_CREATE_SPARSE_BINDING_BIT 2457 if (((pCreateInfo->flags & (VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT | VK_IMAGE_CREATE_SPARSE_ALIASED_BIT)) != 0) && 2458 ((pCreateInfo->flags & VK_IMAGE_CREATE_SPARSE_BINDING_BIT) != VK_IMAGE_CREATE_SPARSE_BINDING_BIT)) { 2459 skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, 2460 LayerName, 2461 "vkCreateImage: pCreateInfo->flags contains VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT or " 2462 "VK_IMAGE_CREATE_SPARSE_ALIASED_BIT, it must also contain VK_IMAGE_CREATE_SPARSE_BINDING_BIT"); 2463 } 2464 } 2465 2466 if (!skip_call) { 2467 result = get_dispatch_table(pc_device_table_map, device)->CreateImage(device, pCreateInfo, pAllocator, pImage); 2468 2469 validate_result(report_data, "vkCreateImage", result); 2470 } 2471 2472 return result; 2473} 2474 2475VKAPI_ATTR void VKAPI_CALL DestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) { 2476 bool skipCall = false; 2477 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2478 assert(my_data != NULL); 2479 2480 skipCall |= parameter_validation_vkDestroyImage(my_data->report_data, image, pAllocator); 2481 2482 if (!skipCall) { 2483 get_dispatch_table(pc_device_table_map, device)->DestroyImage(device, image, pAllocator); 2484 } 2485} 2486 2487bool PreGetImageSubresourceLayout(VkDevice device, const VkImageSubresource *pSubresource) { 2488 if (pSubresource != nullptr) { 2489 if ((pSubresource->aspectMask & 
(VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT | 2490 VK_IMAGE_ASPECT_METADATA_BIT)) == 0) { 2491 log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 2492 UNRECOGNIZED_VALUE, "PARAMCHECK", 2493 "vkGetImageSubresourceLayout parameter, VkImageAspect pSubresource->aspectMask, is an unrecognized enumerator"); 2494 return false; 2495 } 2496 } 2497 2498 return true; 2499} 2500 2501VKAPI_ATTR void VKAPI_CALL 2502GetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource *pSubresource, VkSubresourceLayout *pLayout) { 2503 bool skipCall = false; 2504 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2505 assert(my_data != NULL); 2506 2507 skipCall |= parameter_validation_vkGetImageSubresourceLayout(my_data->report_data, image, pSubresource, pLayout); 2508 2509 if (!skipCall) { 2510 PreGetImageSubresourceLayout(device, pSubresource); 2511 2512 get_dispatch_table(pc_device_table_map, device)->GetImageSubresourceLayout(device, image, pSubresource, pLayout); 2513 } 2514} 2515 2516VKAPI_ATTR VkResult VKAPI_CALL CreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo, 2517 const VkAllocationCallbacks *pAllocator, VkImageView *pView) { 2518 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2519 bool skip_call = false; 2520 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2521 assert(my_data != NULL); 2522 debug_report_data *report_data = my_data->report_data; 2523 2524 skip_call |= parameter_validation_vkCreateImageView(report_data, pCreateInfo, pAllocator, pView); 2525 2526 if (pCreateInfo != nullptr) { 2527 if ((pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_1D) || (pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_2D)) { 2528 if (pCreateInfo->subresourceRange.layerCount != 1) { 2529 skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, 
2530 LayerName, "vkCreateImageView: if pCreateInfo->viewType is VK_IMAGE_TYPE_%dD, " 2531 "pCreateInfo->subresourceRange.layerCount must be 1", 2532 ((pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_1D) ? 1 : 2)); 2533 } 2534 } else if ((pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_1D_ARRAY) || 2535 (pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_1D_ARRAY)) { 2536 if (pCreateInfo->subresourceRange.layerCount < 1) { 2537 skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, 2538 LayerName, "vkCreateImageView: if pCreateInfo->viewType is VK_IMAGE_TYPE_%dD_ARRAY, " 2539 "pCreateInfo->subresourceRange.layerCount must be >= 1", 2540 ((pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_1D_ARRAY) ? 1 : 2)); 2541 } 2542 } else if (pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_CUBE) { 2543 if (pCreateInfo->subresourceRange.layerCount != 6) { 2544 skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, 2545 LayerName, "vkCreateImageView: if pCreateInfo->viewType is VK_IMAGE_TYPE_CUBE, " 2546 "pCreateInfo->subresourceRange.layerCount must be 6"); 2547 } 2548 } else if (pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_CUBE_ARRAY) { 2549 if ((pCreateInfo->subresourceRange.layerCount == 0) || ((pCreateInfo->subresourceRange.layerCount % 6) != 0)) { 2550 skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, 2551 LayerName, "vkCreateImageView: if pCreateInfo->viewType is VK_IMAGE_TYPE_CUBE_ARRAY, " 2552 "pCreateInfo->subresourceRange.layerCount must be a multiple of 6"); 2553 } 2554 } else if (pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_3D) { 2555 if (pCreateInfo->subresourceRange.baseArrayLayer != 0) { 2556 skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, 2557 LayerName, "vkCreateImageView: if pCreateInfo->viewType is VK_IMAGE_TYPE_3D, " 2558 
"pCreateInfo->subresourceRange.baseArrayLayer must be 0"); 2559 } 2560 2561 if (pCreateInfo->subresourceRange.layerCount != 1) { 2562 skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, 2563 LayerName, "vkCreateImageView: if pCreateInfo->viewType is VK_IMAGE_TYPE_3D, " 2564 "pCreateInfo->subresourceRange.layerCount must be 1"); 2565 } 2566 } 2567 } 2568 2569 if (!skip_call) { 2570 result = get_dispatch_table(pc_device_table_map, device)->CreateImageView(device, pCreateInfo, pAllocator, pView); 2571 2572 validate_result(my_data->report_data, "vkCreateImageView", result); 2573 } 2574 2575 return result; 2576} 2577 2578VKAPI_ATTR void VKAPI_CALL 2579DestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks *pAllocator) { 2580 bool skipCall = false; 2581 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2582 assert(my_data != NULL); 2583 2584 skipCall |= parameter_validation_vkDestroyImageView(my_data->report_data, imageView, pAllocator); 2585 2586 if (!skipCall) { 2587 get_dispatch_table(pc_device_table_map, device)->DestroyImageView(device, imageView, pAllocator); 2588 } 2589} 2590 2591VKAPI_ATTR VkResult VKAPI_CALL CreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo, 2592 const VkAllocationCallbacks *pAllocator, 2593 VkShaderModule *pShaderModule) { 2594 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2595 bool skipCall = false; 2596 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2597 assert(my_data != NULL); 2598 2599 skipCall |= parameter_validation_vkCreateShaderModule(my_data->report_data, pCreateInfo, pAllocator, pShaderModule); 2600 2601 if (!skipCall) { 2602 result = 2603 get_dispatch_table(pc_device_table_map, device)->CreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule); 2604 2605 validate_result(my_data->report_data, "vkCreateShaderModule", result); 2606 } 2607 
    return result;
}

// Layer intercept: validates parameters, then forwards vkDestroyShaderModule down the chain.
VKAPI_ATTR void VKAPI_CALL
DestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks *pAllocator) {
    bool skipCall = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(my_data != NULL);

    skipCall |= parameter_validation_vkDestroyShaderModule(my_data->report_data, shaderModule, pAllocator);

    // Only forward when validation found no fatal parameter errors
    if (!skipCall) {
        get_dispatch_table(pc_device_table_map, device)->DestroyShaderModule(device, shaderModule, pAllocator);
    }
}

// Layer intercept: generated parameter validation, then forward vkCreatePipelineCache
// and check the returned VkResult against the codes the spec allows.
VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineCache(VkDevice device, const VkPipelineCacheCreateInfo *pCreateInfo,
                                                   const VkAllocationCallbacks *pAllocator,
                                                   VkPipelineCache *pPipelineCache) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
    bool skipCall = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(my_data != NULL);

    skipCall |= parameter_validation_vkCreatePipelineCache(my_data->report_data, pCreateInfo, pAllocator, pPipelineCache);

    if (!skipCall) {
        result =
            get_dispatch_table(pc_device_table_map, device)->CreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache);

        validate_result(my_data->report_data, "vkCreatePipelineCache", result);
    }

    return result;
}

// Layer intercept: validates parameters, then forwards vkDestroyPipelineCache.
VKAPI_ATTR void VKAPI_CALL
DestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks *pAllocator) {
    bool skipCall = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(my_data != NULL);

    skipCall |= parameter_validation_vkDestroyPipelineCache(my_data->report_data, pipelineCache, pAllocator);

    if (!skipCall) {
        get_dispatch_table(pc_device_table_map, device)->DestroyPipelineCache(device, pipelineCache, pAllocator);
    }
}

VKAPI_ATTR VkResult
VKAPI_CALL 2658GetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t *pDataSize, void *pData) { 2659 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2660 bool skipCall = false; 2661 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2662 assert(my_data != NULL); 2663 2664 skipCall |= parameter_validation_vkGetPipelineCacheData(my_data->report_data, pipelineCache, pDataSize, pData); 2665 2666 if (!skipCall) { 2667 result = get_dispatch_table(pc_device_table_map, device)->GetPipelineCacheData(device, pipelineCache, pDataSize, pData); 2668 2669 validate_result(my_data->report_data, "vkGetPipelineCacheData", result); 2670 } 2671 2672 return result; 2673} 2674 2675VKAPI_ATTR VkResult VKAPI_CALL 2676MergePipelineCaches(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache *pSrcCaches) { 2677 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2678 bool skipCall = false; 2679 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2680 assert(my_data != NULL); 2681 2682 skipCall |= parameter_validation_vkMergePipelineCaches(my_data->report_data, dstCache, srcCacheCount, pSrcCaches); 2683 2684 if (!skipCall) { 2685 result = get_dispatch_table(pc_device_table_map, device)->MergePipelineCaches(device, dstCache, srcCacheCount, pSrcCaches); 2686 2687 validate_result(my_data->report_data, "vkMergePipelineCaches", result); 2688 } 2689 2690 return result; 2691} 2692 2693bool PreCreateGraphicsPipelines(VkDevice device, const VkGraphicsPipelineCreateInfo *pCreateInfos) { 2694 layer_data *data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2695 2696 // TODO: Handle count 2697 if (pCreateInfos != nullptr) { 2698 if (pCreateInfos->flags | VK_PIPELINE_CREATE_DERIVATIVE_BIT) { 2699 if (pCreateInfos->basePipelineIndex != -1) { 2700 if (pCreateInfos->basePipelineHandle != VK_NULL_HANDLE) { 2701 log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, 
VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 2702 INVALID_USAGE, "PARAMCHECK", 2703 "vkCreateGraphicsPipelines parameter, pCreateInfos->basePipelineHandle, must be VK_NULL_HANDLE if " 2704 "pCreateInfos->flags " 2705 "contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag and pCreateInfos->basePipelineIndex is not -1"); 2706 return false; 2707 } 2708 } 2709 2710 if (pCreateInfos->basePipelineHandle != VK_NULL_HANDLE) { 2711 if (pCreateInfos->basePipelineIndex != -1) { 2712 log_msg( 2713 mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 2714 INVALID_USAGE, "PARAMCHECK", 2715 "vkCreateGraphicsPipelines parameter, pCreateInfos->basePipelineIndex, must be -1 if pCreateInfos->flags " 2716 "contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag and pCreateInfos->basePipelineHandle is not " 2717 "VK_NULL_HANDLE"); 2718 return false; 2719 } 2720 } 2721 } 2722 2723 if (pCreateInfos->pRasterizationState != nullptr) { 2724 if (pCreateInfos->pRasterizationState->cullMode & ~VK_CULL_MODE_FRONT_AND_BACK) { 2725 log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 2726 UNRECOGNIZED_VALUE, "PARAMCHECK", 2727 "vkCreateGraphicsPipelines parameter, VkCullMode pCreateInfos->pRasterizationState->cullMode, is an " 2728 "unrecognized enumerator"); 2729 return false; 2730 } 2731 } 2732 2733 int i = 0; 2734 for (size_t j = 0; j < pCreateInfos[i].stageCount; j++) { 2735 validate_string(data->report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pStages[j].pName", 2736 pCreateInfos[i].pStages[j].pName); 2737 } 2738 } 2739 2740 return true; 2741} 2742 2743VKAPI_ATTR VkResult VKAPI_CALL 2744CreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, 2745 const VkGraphicsPipelineCreateInfo *pCreateInfos, const VkAllocationCallbacks *pAllocator, 2746 VkPipeline *pPipelines) { 2747 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2748 bool skip_call = 
false;
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(device_data != nullptr);
    debug_report_data *report_data = device_data->report_data;

    skip_call |= parameter_validation_vkCreateGraphicsPipelines(report_data, pipelineCache, createInfoCount, pCreateInfos,
                                                                pAllocator, pPipelines);

    if (pCreateInfos != nullptr) {
        for (uint32_t i = 0; i < createInfoCount; ++i) {
            // Validation for parameters excluded from the generated validation code due to a 'noautovalidity' tag in vk.xml
            if (pCreateInfos[i].pTessellationState == nullptr) {
                if (pCreateInfos[i].pStages != nullptr) {
                    // If pStages includes a tessellation control shader stage and a tessellation evaluation shader stage,
                    // pTessellationState must not be NULL
                    bool has_control = false;
                    bool has_eval = false;

                    for (uint32_t stage_index = 0; stage_index < pCreateInfos[i].stageCount; ++stage_index) {
                        if (pCreateInfos[i].pStages[stage_index].stage == VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT) {
                            has_control = true;
                        } else if (pCreateInfos[i].pStages[stage_index].stage == VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) {
                            has_eval = true;
                        }
                    }

                    if (has_control && has_eval) {
                        skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                             __LINE__, REQUIRED_PARAMETER, LayerName,
                                             "vkCreateGraphicsPipelines: if pCreateInfos[%d].pStages includes a tessellation "
                                             "control shader stage and a tessellation evaluation shader stage, "
                                             "pCreateInfos[%d].pTessellationState must not be NULL",
                                             i, i);
                    }
                }
            } else {
                // pTessellationState present: manually validate the fields the generator skipped
                skip_call |=
                    validate_struct_pnext(report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pTessellationState->pNext",
                                          NULL, pCreateInfos[i].pTessellationState->pNext, 0, NULL);

                skip_call |=
                    validate_reserved_flags(report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pTessellationState->flags",
                                            pCreateInfos[i].pTessellationState->flags);

                if (pCreateInfos[i].pTessellationState->sType != VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO) {
                    skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                         __LINE__, INVALID_STRUCT_STYPE, LayerName,
                                         "vkCreateGraphicsPipelines: parameter pCreateInfos[%d].pTessellationState->sType must be "
                                         "VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO",
                                         i);
                }
            }

            if (pCreateInfos[i].pViewportState == nullptr) {
                // If the rasterizerDiscardEnable member of pRasterizationState is VK_FALSE, pViewportState must be a pointer to a
                // valid VkPipelineViewportStateCreateInfo structure
                if ((pCreateInfos[i].pRasterizationState != nullptr) &&
                    (pCreateInfos[i].pRasterizationState->rasterizerDiscardEnable == VK_FALSE)) {
                    skip_call |= log_msg(
                        report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                        REQUIRED_PARAMETER, LayerName,
                        "vkCreateGraphicsPipelines: if pCreateInfos[%d].pRasterizationState->rasterizerDiscardEnable is VK_FALSE, "
                        "pCreateInfos[%d].pViewportState must be a pointer to a valid VkPipelineViewportStateCreateInfo structure",
                        i, i);
                }
            } else {
                skip_call |=
                    validate_struct_pnext(report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pViewportState->pNext", NULL,
                                          pCreateInfos[i].pViewportState->pNext, 0, NULL);

                skip_call |=
                    validate_reserved_flags(report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pViewportState->flags",
                                            pCreateInfos[i].pViewportState->flags);

                if (pCreateInfos[i].pViewportState->sType != VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO) {
                    skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                         __LINE__, INVALID_STRUCT_STYPE, LayerName,
                                         "vkCreateGraphicsPipelines: parameter pCreateInfos[%d].pViewportState->sType must be "
                                         "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO",
                                         i);
                }

                if (pCreateInfos[i].pDynamicState != nullptr) {
                    // Scan the dynamic-state list once so the viewport/scissor pointer checks
                    // below know which arrays may legitimately be NULL.
                    bool has_dynamic_viewport = false;
                    bool has_dynamic_scissor = false;

                    for (uint32_t state_index = 0; state_index < pCreateInfos[i].pDynamicState->dynamicStateCount; ++state_index) {
                        if (pCreateInfos[i].pDynamicState->pDynamicStates[state_index] == VK_DYNAMIC_STATE_VIEWPORT) {
                            has_dynamic_viewport = true;
                        } else if (pCreateInfos[i].pDynamicState->pDynamicStates[state_index] == VK_DYNAMIC_STATE_SCISSOR) {
                            has_dynamic_scissor = true;
                        }
                    }

                    // viewportCount must be greater than 0
                    // TODO: viewportCount must be 1 when multiple_viewport feature is not enabled
                    // NOTE(review): this fires whenever viewportCount == 0, but the message claims it is
                    // conditional on VK_DYNAMIC_STATE_VIEWPORT being present — the condition and message
                    // do not match; confirm intended behavior against the spec's valid-usage wording.
                    if (pCreateInfos[i].pViewportState->viewportCount == 0) {
                        skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                             __LINE__, REQUIRED_PARAMETER, LayerName,
                                             "vkCreateGraphicsPipelines: if pCreateInfos[%d].pDynamicState->pDynamicStates "
                                             "contains VK_DYNAMIC_STATE_VIEWPORT, pCreateInfos[%d].pViewportState->viewportCount "
                                             "must be greater than 0",
                                             i, i);
                    }

                    // If no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_VIEWPORT, the pViewports
                    // member of pViewportState must be a pointer to an array of pViewportState->viewportCount VkViewport structures
                    if (!has_dynamic_viewport && (pCreateInfos[i].pViewportState->pViewports == nullptr)) {
                        skip_call |=
                            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                    __LINE__, REQUIRED_PARAMETER, LayerName,
                                    "vkCreateGraphicsPipelines: if pCreateInfos[%d].pDynamicState->pDynamicStates contains "
                                    "VK_DYNAMIC_STATE_VIEWPORT, pCreateInfos[%d].pViewportState->pViewports must not be NULL",
                                    i, i);
                    }

                    // scissorCount must be greater than 0
                    // TODO: scissorCount must be 1 when multiple_viewport feature is not enabled
                    // NOTE(review): same condition/message mismatch as the viewportCount check above.
                    if (pCreateInfos[i].pViewportState->scissorCount == 0) {
                        skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                             __LINE__, REQUIRED_PARAMETER, LayerName,
                                             "vkCreateGraphicsPipelines: if pCreateInfos[%d].pDynamicState->pDynamicStates "
                                             "contains VK_DYNAMIC_STATE_SCISSOR, pCreateInfos[%d].pViewportState->scissorCount "
                                             "must be greater than 0",
                                             i, i);
                    }

                    // If no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_SCISSOR, the pScissors member
                    // of pViewportState must be a pointer to an array of pViewportState->scissorCount VkRect2D structures
                    if (!has_dynamic_scissor && (pCreateInfos[i].pViewportState->pScissors == nullptr)) {
                        skip_call |=
                            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                    __LINE__, REQUIRED_PARAMETER, LayerName,
                                    "vkCreateGraphicsPipelines: if pCreateInfos[%d].pDynamicState->pDynamicStates contains "
                                    "VK_DYNAMIC_STATE_SCISSOR, pCreateInfos[%d].pViewportState->pScissors must not be NULL",
                                    i, i);
                    }
                }
            }

            if (pCreateInfos[i].pMultisampleState == nullptr) {
                // If the rasterizerDiscardEnable member of pRasterizationState is VK_FALSE, pMultisampleState must be a pointer to
                // a valid VkPipelineMultisampleStateCreateInfo structure
                if ((pCreateInfos[i].pRasterizationState != nullptr) &&
                    pCreateInfos[i].pRasterizationState->rasterizerDiscardEnable == VK_FALSE) {
                    skip_call |=
                        log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                                REQUIRED_PARAMETER, LayerName, "vkCreateGraphicsPipelines: if "
                                                               "pCreateInfos[%d].pRasterizationState->rasterizerDiscardEnable is "
                                                               "VK_FALSE, pCreateInfos[%d].pMultisampleState must not be NULL",
                                i, i);
                }
            } else {
                // pMultisampleState present: validate every field manually
                skip_call |=
                    validate_struct_pnext(report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pMultisampleState->pNext",
                                          NULL, pCreateInfos[i].pMultisampleState->pNext, 0, NULL);

                skip_call |=
                    validate_reserved_flags(report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pMultisampleState->flags",
                                            pCreateInfos[i].pMultisampleState->flags);

                skip_call |= validate_bool32(report_data, "vkCreateGraphicsPipelines",
                                             "pCreateInfos[i].pMultisampleState->sampleShadingEnable",
                                             pCreateInfos[i].pMultisampleState->sampleShadingEnable);

                skip_call |= validate_array(
                    report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pMultisampleState->rasterizationSamples",
                    "pCreateInfos[i].pMultisampleState->pSampleMask", pCreateInfos[i].pMultisampleState->rasterizationSamples,
                    pCreateInfos[i].pMultisampleState->pSampleMask, true, false);

                skip_call |= validate_bool32(report_data, "vkCreateGraphicsPipelines",
                                             "pCreateInfos[i].pMultisampleState->alphaToCoverageEnable",
                                             pCreateInfos[i].pMultisampleState->alphaToCoverageEnable);

                skip_call |=
                    validate_bool32(report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pMultisampleState->alphaToOneEnable",
                                    pCreateInfos[i].pMultisampleState->alphaToOneEnable);

                if (pCreateInfos[i].pMultisampleState->sType != VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO) {
                    skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                         __LINE__, INVALID_STRUCT_STYPE, LayerName,
                                         "vkCreateGraphicsPipelines: parameter pCreateInfos[%d].pMultisampleState->sType must be "
                                         "VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO",
                                         i);
                }
            }

            // TODO: Conditional NULL check based on rasterizerDiscardEnable and subpass
            if (pCreateInfos[i].pDepthStencilState != nullptr) {
                // pDepthStencilState present: validate every field manually
                skip_call |=
                    validate_struct_pnext(report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pDepthStencilState->pNext",
                                          NULL, pCreateInfos[i].pDepthStencilState->pNext, 0, NULL);

                skip_call |=
                    validate_reserved_flags(report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pDepthStencilState->flags",
                                            pCreateInfos[i].pDepthStencilState->flags);

                skip_call |=
                    validate_bool32(report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pDepthStencilState->depthTestEnable",
                                    pCreateInfos[i].pDepthStencilState->depthTestEnable);

                skip_call |= validate_bool32(report_data, "vkCreateGraphicsPipelines",
                                             "pCreateInfos[i].pDepthStencilState->depthWriteEnable",
                                             pCreateInfos[i].pDepthStencilState->depthWriteEnable);

                skip_call |= validate_ranged_enum(
                    report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pDepthStencilState->depthCompareOp", "VkCompareOp",
                    VK_COMPARE_OP_BEGIN_RANGE, VK_COMPARE_OP_END_RANGE, pCreateInfos[i].pDepthStencilState->depthCompareOp);

                skip_call |= validate_bool32(report_data, "vkCreateGraphicsPipelines",
                                             "pCreateInfos[i].pDepthStencilState->depthBoundsTestEnable",
                                             pCreateInfos[i].pDepthStencilState->depthBoundsTestEnable);

                skip_call |= validate_bool32(report_data, "vkCreateGraphicsPipelines",
                                             "pCreateInfos[i].pDepthStencilState->stencilTestEnable",
                                             pCreateInfos[i].pDepthStencilState->stencilTestEnable);

                skip_call |= validate_ranged_enum(
                    report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pDepthStencilState->front.failOp", "VkStencilOp",
                    VK_STENCIL_OP_BEGIN_RANGE, VK_STENCIL_OP_END_RANGE, pCreateInfos[i].pDepthStencilState->front.failOp);

                skip_call |= validate_ranged_enum(
                    report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pDepthStencilState->front.passOp", "VkStencilOp",
                    VK_STENCIL_OP_BEGIN_RANGE, VK_STENCIL_OP_END_RANGE, pCreateInfos[i].pDepthStencilState->front.passOp);

                skip_call |= validate_ranged_enum(report_data, "vkCreateGraphicsPipelines",
                                                  "pCreateInfos[i].pDepthStencilState->front.depthFailOp", "VkStencilOp",
                                                  VK_STENCIL_OP_BEGIN_RANGE, VK_STENCIL_OP_END_RANGE,
                                                  pCreateInfos[i].pDepthStencilState->front.depthFailOp);

                skip_call |= validate_ranged_enum(
                    report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pDepthStencilState->front.compareOp", "VkCompareOp",
                    VK_COMPARE_OP_BEGIN_RANGE, VK_COMPARE_OP_END_RANGE, pCreateInfos[i].pDepthStencilState->front.compareOp);

                skip_call |= validate_ranged_enum(
                    report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pDepthStencilState->back.failOp", "VkStencilOp",
                    VK_STENCIL_OP_BEGIN_RANGE, VK_STENCIL_OP_END_RANGE, pCreateInfos[i].pDepthStencilState->back.failOp);

                skip_call |= validate_ranged_enum(
                    report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pDepthStencilState->back.passOp", "VkStencilOp",
                    VK_STENCIL_OP_BEGIN_RANGE, VK_STENCIL_OP_END_RANGE, pCreateInfos[i].pDepthStencilState->back.passOp);

                skip_call |= validate_ranged_enum(
                    report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pDepthStencilState->back.depthFailOp", "VkStencilOp",
                    VK_STENCIL_OP_BEGIN_RANGE, VK_STENCIL_OP_END_RANGE, pCreateInfos[i].pDepthStencilState->back.depthFailOp);

                skip_call |= validate_ranged_enum(
                    report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pDepthStencilState->back.compareOp", "VkCompareOp",
                    VK_COMPARE_OP_BEGIN_RANGE, VK_COMPARE_OP_END_RANGE, pCreateInfos[i].pDepthStencilState->back.compareOp);

                if (pCreateInfos[i].pDepthStencilState->sType != VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO) {
                    skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                         __LINE__, INVALID_STRUCT_STYPE, LayerName,
                                         "vkCreateGraphicsPipelines: parameter pCreateInfos[%d].pDepthStencilState->sType must be "
                                         "VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO",
                                         i);
                }
            }

            // TODO: Conditional NULL check based on rasterizerDiscardEnable and subpass
            if (pCreateInfos[i].pColorBlendState != nullptr) {
                // pColorBlendState present: validate every field manually
                skip_call |=
                    validate_struct_pnext(report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pColorBlendState->pNext", NULL,
                                          pCreateInfos[i].pColorBlendState->pNext, 0, NULL);

                skip_call |=
                    validate_reserved_flags(report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pColorBlendState->flags",
                                            pCreateInfos[i].pColorBlendState->flags);

                skip_call |=
                    validate_bool32(report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pColorBlendState->logicOpEnable",
                                    pCreateInfos[i].pColorBlendState->logicOpEnable);

                skip_call |= validate_array(
                    report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pColorBlendState->attachmentCount",
                    "pCreateInfos[i].pColorBlendState->pAttachments", pCreateInfos[i].pColorBlendState->attachmentCount,
                    pCreateInfos[i].pColorBlendState->pAttachments, false, true);

                if (pCreateInfos[i].pColorBlendState->pAttachments != NULL) {
                    // Per-attachment blend-state validation
                    for (uint32_t attachmentIndex = 0; attachmentIndex < pCreateInfos[i].pColorBlendState->attachmentCount;
                         ++attachmentIndex) {
                        skip_call |= validate_bool32(report_data, "vkCreateGraphicsPipelines",
                                                     "pCreateInfos[i].pColorBlendState->pAttachments[i].blendEnable",
                                                     pCreateInfos[i].pColorBlendState->pAttachments[attachmentIndex].blendEnable);

                        skip_call |= validate_ranged_enum(
                            report_data, "vkCreateGraphicsPipelines",
                            "pCreateInfos[i].pColorBlendState->pAttachments[i].srcColorBlendFactor", "VkBlendFactor",
                            VK_BLEND_FACTOR_BEGIN_RANGE, VK_BLEND_FACTOR_END_RANGE,
                            pCreateInfos[i].pColorBlendState->pAttachments[attachmentIndex].srcColorBlendFactor);

                        skip_call |= validate_ranged_enum(
                            report_data, "vkCreateGraphicsPipelines",
                            "pCreateInfos[i].pColorBlendState->pAttachments[i].dstColorBlendFactor", "VkBlendFactor",
                            VK_BLEND_FACTOR_BEGIN_RANGE, VK_BLEND_FACTOR_END_RANGE,
                            pCreateInfos[i].pColorBlendState->pAttachments[attachmentIndex].dstColorBlendFactor);

                        skip_call |= validate_ranged_enum(
                            report_data, "vkCreateGraphicsPipelines",
                            "pCreateInfos[i].pColorBlendState->pAttachments[i].colorBlendOp", "VkBlendOp", VK_BLEND_OP_BEGIN_RANGE,
                            VK_BLEND_OP_END_RANGE, pCreateInfos[i].pColorBlendState->pAttachments[attachmentIndex].colorBlendOp);

                        skip_call |= validate_ranged_enum(
                            report_data, "vkCreateGraphicsPipelines",
                            "pCreateInfos[i].pColorBlendState->pAttachments[i].srcAlphaBlendFactor", "VkBlendFactor",
                            VK_BLEND_FACTOR_BEGIN_RANGE, VK_BLEND_FACTOR_END_RANGE,
                            pCreateInfos[i].pColorBlendState->pAttachments[attachmentIndex].srcAlphaBlendFactor);

                        skip_call |= validate_ranged_enum(
                            report_data, "vkCreateGraphicsPipelines",
                            "pCreateInfos[i].pColorBlendState->pAttachments[i].dstAlphaBlendFactor", "VkBlendFactor",
                            VK_BLEND_FACTOR_BEGIN_RANGE, VK_BLEND_FACTOR_END_RANGE,
                            pCreateInfos[i].pColorBlendState->pAttachments[attachmentIndex].dstAlphaBlendFactor);

                        skip_call |= validate_ranged_enum(
                            report_data, "vkCreateGraphicsPipelines",
                            "pCreateInfos[i].pColorBlendState->pAttachments[i].alphaBlendOp", "VkBlendOp", VK_BLEND_OP_BEGIN_RANGE,
                            VK_BLEND_OP_END_RANGE, pCreateInfos[i].pColorBlendState->pAttachments[attachmentIndex].alphaBlendOp);

                        skip_call |=
                            validate_flags(report_data, "vkCreateGraphicsPipelines",
                                           "pCreateInfos[i].pColorBlendState->pAttachments[i].colorWriteMask",
                                           "VkColorComponentFlagBits", AllVkColorComponentFlagBits,
                                           pCreateInfos[i].pColorBlendState->pAttachments[attachmentIndex].colorWriteMask, false);
                    }
                }

                if (pCreateInfos[i].pColorBlendState->sType != VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO) {
                    skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                         __LINE__, INVALID_STRUCT_STYPE, LayerName,
                                         "vkCreateGraphicsPipelines: parameter pCreateInfos[%d].pColorBlendState->sType must be "
                                         "VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO",
                                         i);
                }

                // If logicOpEnable is VK_TRUE, logicOp must be a valid VkLogicOp value
                if (pCreateInfos[i].pColorBlendState->logicOpEnable == VK_TRUE) {
                    skip_call |= validate_ranged_enum(
                        report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pColorBlendState->logicOp", "VkLogicOp",
                        VK_LOGIC_OP_BEGIN_RANGE, VK_LOGIC_OP_END_RANGE, pCreateInfos[i].pColorBlendState->logicOp);
                }
            }
        }
    }

    // Run the legacy pre-checks and forward to the driver only when nothing failed
    if (!skip_call) {
        PreCreateGraphicsPipelines(device, pCreateInfos);

        result = get_dispatch_table(pc_device_table_map, device)
                     ->CreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);

        validate_result(report_data, "vkCreateGraphicsPipelines", result);
    }

    return result;
}

// Manual pre-check for vkCreateComputePipelines: validates the compute stage entry-point name.
bool PreCreateComputePipelines(VkDevice device, const VkComputePipelineCreateInfo *pCreateInfos) {
    layer_data *data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);

    if (pCreateInfos != nullptr) {
        // TODO: Handle count!
        // Only the first create info is checked (see 'Handle count' TODO above)
        int i = 0;
        validate_string(data->report_data, "vkCreateComputePipelines", "pCreateInfos[i].stage.pName", pCreateInfos[i].stage.pName);
    }

    return true;
}

// Layer intercept for vkCreateComputePipelines: generated validation, manual pre-checks,
// forward to the driver, then check the returned VkResult.
VKAPI_ATTR VkResult VKAPI_CALL
CreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
                       const VkComputePipelineCreateInfo *pCreateInfos, const VkAllocationCallbacks *pAllocator,
                       VkPipeline *pPipelines) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
    bool skipCall = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(my_data != NULL);

    skipCall |= parameter_validation_vkCreateComputePipelines(my_data->report_data, pipelineCache, createInfoCount, pCreateInfos,
                                                              pAllocator, pPipelines);

    if (!skipCall) {
        PreCreateComputePipelines(device, pCreateInfos);

        result = get_dispatch_table(pc_device_table_map, device)
                     ->CreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);

        validate_result(my_data->report_data, "vkCreateComputePipelines", result);
    }

    return result;
}

// Layer intercept: validates parameters, then forwards vkDestroyPipeline.
VKAPI_ATTR void VKAPI_CALL
DestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks *pAllocator) {
    bool skipCall = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(my_data != NULL);

    skipCall |= parameter_validation_vkDestroyPipeline(my_data->report_data, pipeline, pAllocator);

    if (!skipCall) {
        get_dispatch_table(pc_device_table_map, device)->DestroyPipeline(device, pipeline, pAllocator);
    }
}

// Layer intercept: generated validation, then forward vkCreatePipelineLayout.
VKAPI_ATTR VkResult VKAPI_CALL
CreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
                     VkPipelineLayout *pPipelineLayout) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
    bool
         skipCall = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(my_data != NULL);

    skipCall |= parameter_validation_vkCreatePipelineLayout(my_data->report_data, pCreateInfo, pAllocator, pPipelineLayout);

    if (!skipCall) {
        result =
            get_dispatch_table(pc_device_table_map, device)->CreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout);

        validate_result(my_data->report_data, "vkCreatePipelineLayout", result);
    }

    return result;
}

// vkDestroyPipelineLayout: generated parameter checks only, then dispatch.
VKAPI_ATTR void VKAPI_CALL
DestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks *pAllocator) {
    bool skipCall = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(my_data != NULL);

    skipCall |= parameter_validation_vkDestroyPipelineLayout(my_data->report_data, pipelineLayout, pAllocator);

    if (!skipCall) {
        get_dispatch_table(pc_device_table_map, device)->DestroyPipelineLayout(device, pipelineLayout, pAllocator);
    }
}

// vkCreateSampler: generated checks plus manual checks for 'noautovalidity' members
// (compareOp and borderColor are only meaningful for certain other member values).
VKAPI_ATTR VkResult VKAPI_CALL CreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
                                             const VkAllocationCallbacks *pAllocator, VkSampler *pSampler) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
    bool skip_call = false;
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(device_data != NULL);
    debug_report_data *report_data = device_data->report_data;

    skip_call |= parameter_validation_vkCreateSampler(report_data, pCreateInfo, pAllocator, pSampler);

    // Validation for parameters excluded from the generated validation code due to a 'noautovalidity' tag in vk.xml
    if (pCreateInfo != nullptr) {
        // If compareEnable is VK_TRUE, compareOp must be a valid VkCompareOp value
        if (pCreateInfo->compareEnable == VK_TRUE) {
            skip_call |=
                validate_ranged_enum(report_data, "vkCreateSampler", "pCreateInfo->compareOp", "VkCompareOp",
                                     VK_COMPARE_OP_BEGIN_RANGE, VK_COMPARE_OP_END_RANGE, pCreateInfo->compareOp);
        }

        // If any of addressModeU, addressModeV or addressModeW are VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER, borderColor must be a
        // valid VkBorderColor value
        if ((pCreateInfo->addressModeU == VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER) ||
            (pCreateInfo->addressModeV == VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER) ||
            (pCreateInfo->addressModeW == VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER)) {
            skip_call |= validate_ranged_enum(report_data, "vkCreateSampler", "pCreateInfo->borderColor", "VkBorderColor",
                                              VK_BORDER_COLOR_BEGIN_RANGE, VK_BORDER_COLOR_END_RANGE, pCreateInfo->borderColor);
        }
    }

    if (!skip_call) {
        result = get_dispatch_table(pc_device_table_map, device)->CreateSampler(device, pCreateInfo, pAllocator, pSampler);

        validate_result(report_data, "vkCreateSampler", result);
    }

    return result;
}

// vkDestroySampler: generated parameter checks only, then dispatch.
VKAPI_ATTR void VKAPI_CALL
DestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks *pAllocator) {
    bool skipCall = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(my_data != NULL);

    skipCall |= parameter_validation_vkDestroySampler(my_data->report_data, sampler, pAllocator);

    if (!skipCall) {
        get_dispatch_table(pc_device_table_map, device)->DestroySampler(device, sampler, pAllocator);
    }
}

// vkCreateDescriptorSetLayout: generated checks plus manual validation of each
// binding's pImmutableSamplers handles and stageFlags ('noautovalidity' in vk.xml).
VKAPI_ATTR VkResult VKAPI_CALL
CreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
                          const VkAllocationCallbacks *pAllocator, VkDescriptorSetLayout *pSetLayout) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
    bool skip_call = false;
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(device_data != nullptr);
    debug_report_data *report_data = device_data->report_data;

    skip_call |= parameter_validation_vkCreateDescriptorSetLayout(report_data, pCreateInfo, pAllocator, pSetLayout);

    // Validation for parameters excluded from the generated validation code due to a 'noautovalidity' tag in vk.xml
    if ((pCreateInfo != nullptr) && (pCreateInfo->pBindings != nullptr)) {
        for (uint32_t i = 0; i < pCreateInfo->bindingCount; ++i) {
            if (pCreateInfo->pBindings[i].descriptorCount != 0) {
                // If descriptorType is VK_DESCRIPTOR_TYPE_SAMPLER or VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, and descriptorCount
                // is not 0 and pImmutableSamplers is not NULL, pImmutableSamplers must be a pointer to an array of descriptorCount
                // valid VkSampler handles
                if (((pCreateInfo->pBindings[i].descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) ||
                     (pCreateInfo->pBindings[i].descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)) &&
                    (pCreateInfo->pBindings[i].pImmutableSamplers != nullptr)) {
                    for (uint32_t descriptor_index = 0; descriptor_index < pCreateInfo->pBindings[i].descriptorCount;
                         ++descriptor_index) {
                        if (pCreateInfo->pBindings[i].pImmutableSamplers[descriptor_index] == VK_NULL_HANDLE) {
                            skip_call |=
                                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        __LINE__, REQUIRED_PARAMETER, LayerName, "vkCreateDescriptorSetLayout: required parameter "
                                                                                "pCreateInfo->pBindings[%d].pImmutableSamplers[%d]"
                                                                                " specified as VK_NULL_HANDLE",
                                        i, descriptor_index);
                        }
                    }
                }

                // If descriptorCount is not 0, stageFlags must be a valid combination of VkShaderStageFlagBits values
                if ((pCreateInfo->pBindings[i].stageFlags != 0) &&
                    ((pCreateInfo->pBindings[i].stageFlags & (~AllVkShaderStageFlagBits)) != 0)) {
                    skip_call |=
                        log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                                UNRECOGNIZED_VALUE, LayerName,
                                "vkCreateDescriptorSetLayout: if pCreateInfo->pBindings[%d].descriptorCount is not 0, "
                                "pCreateInfo->pBindings[%d].stageFlags must be a valid combination of VkShaderStageFlagBits values",
                                i, i);
                }
            }
        }
    }

    if (!skip_call) {
        result =
            get_dispatch_table(pc_device_table_map, device)->CreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout);

        validate_result(report_data, "vkCreateDescriptorSetLayout", result);
    }

    return result;
}

// vkDestroyDescriptorSetLayout: generated parameter checks only, then dispatch.
VKAPI_ATTR void VKAPI_CALL
DestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks *pAllocator) {
    bool skipCall = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(my_data != NULL);

    skipCall |= parameter_validation_vkDestroyDescriptorSetLayout(my_data->report_data, descriptorSetLayout, pAllocator);

    if (!skipCall) {
        get_dispatch_table(pc_device_table_map, device)->DestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator);
    }
}

// vkCreateDescriptorPool: generated parameter checks, dispatch, result validation.
VKAPI_ATTR VkResult VKAPI_CALL
CreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
                     VkDescriptorPool *pDescriptorPool) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
    bool skipCall = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(my_data != NULL);

    skipCall |= parameter_validation_vkCreateDescriptorPool(my_data->report_data, pCreateInfo, pAllocator, pDescriptorPool);

    /* TODOVV: How do we validate maxSets? Probably belongs in the limits layer? */

    if (!skipCall) {
        result =
            get_dispatch_table(pc_device_table_map, device)->CreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);

        validate_result(my_data->report_data, "vkCreateDescriptorPool", result);
    }

    return result;
}

// vkDestroyDescriptorPool: generated parameter checks only, then dispatch.
VKAPI_ATTR void VKAPI_CALL
DestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks *pAllocator) {
    bool skipCall = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(my_data != NULL);

    skipCall |= parameter_validation_vkDestroyDescriptorPool(my_data->report_data, descriptorPool, pAllocator);

    if (!skipCall) {
        get_dispatch_table(pc_device_table_map, device)->DestroyDescriptorPool(device, descriptorPool, pAllocator);
    }
}

// vkResetDescriptorPool: generated parameter checks, dispatch, result validation.
VKAPI_ATTR VkResult VKAPI_CALL
ResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
    bool skipCall = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(my_data != NULL);

    skipCall |= parameter_validation_vkResetDescriptorPool(my_data->report_data, descriptorPool, flags);

    if (!skipCall) {
        result = get_dispatch_table(pc_device_table_map, device)->ResetDescriptorPool(device, descriptorPool, flags);

        validate_result(my_data->report_data, "vkResetDescriptorPool", result);
    }

    return result;
}

// vkAllocateDescriptorSets: generated parameter checks, dispatch, result validation.
VKAPI_ATTR VkResult VKAPI_CALL
AllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo, VkDescriptorSet *pDescriptorSets) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
    bool skipCall = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(my_data != NULL);

    skipCall |=
        parameter_validation_vkAllocateDescriptorSets(my_data->report_data, pAllocateInfo, pDescriptorSets);

    if (!skipCall) {
        result = get_dispatch_table(pc_device_table_map, device)->AllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);

        validate_result(my_data->report_data, "vkAllocateDescriptorSets", result);
    }

    return result;
}

// vkFreeDescriptorSets: generated checks plus a manual array check that permits
// VK_NULL_HANDLE elements in pDescriptorSets ('noautovalidity' in vk.xml).
VKAPI_ATTR VkResult VKAPI_CALL FreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool,
                                                  uint32_t descriptorSetCount,
                                                  const VkDescriptorSet *pDescriptorSets) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
    bool skip_call = false;
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(device_data != nullptr);
    debug_report_data *report_data = device_data->report_data;

    skip_call |= parameter_validation_vkFreeDescriptorSets(report_data, descriptorPool, descriptorSetCount, pDescriptorSets);

    // Validation for parameters excluded from the generated validation code due to a 'noautovalidity' tag in vk.xml
    // This is an array of handles, where the elements are allowed to be VK_NULL_HANDLE, and does not require any validation beyond
    // validate_array()
    skip_call |= validate_array(report_data, "vkFreeDescriptorSets", "descriptorSetCount", "pDescriptorSets", descriptorSetCount,
                                pDescriptorSets, true, true);

    if (!skip_call) {
        result = get_dispatch_table(pc_device_table_map, device)
                     ->FreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets);

        validate_result(report_data, "vkFreeDescriptorSets", result);
    }

    return result;
}

// vkUpdateDescriptorSets: generated checks plus manual per-write validation of the
// descriptorType-dependent members pImageInfo/pBufferInfo/pTexelBufferView
// ('noautovalidity' in vk.xml).
VKAPI_ATTR void VKAPI_CALL
UpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet *pDescriptorWrites,
                     uint32_t descriptorCopyCount, const VkCopyDescriptorSet *pDescriptorCopies) {
    bool skip_call = false;
    layer_data
               *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(device_data != NULL);
    debug_report_data *report_data = device_data->report_data;

    skip_call |= parameter_validation_vkUpdateDescriptorSets(report_data, descriptorWriteCount, pDescriptorWrites,
                                                             descriptorCopyCount, pDescriptorCopies);

    // Validation for parameters excluded from the generated validation code due to a 'noautovalidity' tag in vk.xml
    if (pDescriptorWrites != NULL) {
        for (uint32_t i = 0; i < descriptorWriteCount; ++i) {
            // descriptorCount must be greater than 0
            if (pDescriptorWrites[i].descriptorCount == 0) {
                skip_call |=
                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                            REQUIRED_PARAMETER, LayerName,
                            "vkUpdateDescriptorSets: parameter pDescriptorWrites[%d].descriptorCount must be greater than 0", i);
            }

            if ((pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) ||
                (pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) ||
                (pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) ||
                (pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) ||
                (pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)) {
                // If descriptorType is VK_DESCRIPTOR_TYPE_SAMPLER, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
                // VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE or VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT,
                // pImageInfo must be a pointer to an array of descriptorCount valid VkDescriptorImageInfo structures
                if (pDescriptorWrites[i].pImageInfo == nullptr) {
                    skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                         __LINE__, REQUIRED_PARAMETER, LayerName,
                                         "vkUpdateDescriptorSets: if pDescriptorWrites[%d].descriptorType is "
                                         "VK_DESCRIPTOR_TYPE_SAMPLER, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, "
                                         "VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE or "
                                         "VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, pDescriptorWrites[%d].pImageInfo must not be NULL",
                                         i, i);
                } else if (pDescriptorWrites[i].descriptorType != VK_DESCRIPTOR_TYPE_SAMPLER) {
                    // If descriptorType is VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
                    // VK_DESCRIPTOR_TYPE_STORAGE_IMAGE or VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, the imageView and imageLayout
                    // members of any given element of pImageInfo must be a valid VkImageView and VkImageLayout, respectively
                    for (uint32_t descriptor_index = 0; descriptor_index < pDescriptorWrites[i].descriptorCount;
                         ++descriptor_index) {
                        skip_call |= validate_required_handle(report_data, "vkUpdateDescriptorSets",
                                                              "pDescriptorWrites[i].pImageInfo[i].imageView",
                                                              pDescriptorWrites[i].pImageInfo[descriptor_index].imageView);
                        skip_call |= validate_ranged_enum(report_data, "vkUpdateDescriptorSets",
                                                          "pDescriptorWrites[i].pImageInfo[i].imageLayout", "VkImageLayout",
                                                          VK_IMAGE_LAYOUT_BEGIN_RANGE, VK_IMAGE_LAYOUT_END_RANGE,
                                                          pDescriptorWrites[i].pImageInfo[descriptor_index].imageLayout);
                    }
                }
            } else if ((pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
                       (pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER) ||
                       (pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) ||
                       (pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)) {
                // If descriptorType is VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
                // VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC or VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, pBufferInfo must be a
                // pointer to an array of descriptorCount valid VkDescriptorBufferInfo structures
                if (pDescriptorWrites[i].pBufferInfo == nullptr) {
                    skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                         __LINE__, REQUIRED_PARAMETER, LayerName,
                                         "vkUpdateDescriptorSets: if pDescriptorWrites[%d].descriptorType is "
                                         "VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, "
                                         "VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC or VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, "
                                         "pDescriptorWrites[%d].pBufferInfo must not be NULL",
                                         i, i);
                } else {
                    for (uint32_t descriptorIndex = 0; descriptorIndex < pDescriptorWrites[i].descriptorCount; ++descriptorIndex) {
                        skip_call |= validate_required_handle(report_data, "vkUpdateDescriptorSets",
                                                              "pDescriptorWrites[i].pBufferInfo[i].buffer",
                                                              pDescriptorWrites[i].pBufferInfo[descriptorIndex].buffer);
                    }
                }
            } else if ((pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER) ||
                       (pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER)) {
                // If descriptorType is VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER or VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
                // pTexelBufferView must be a pointer to an array of descriptorCount valid VkBufferView handles
                if (pDescriptorWrites[i].pTexelBufferView == nullptr) {
                    skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                         __LINE__, REQUIRED_PARAMETER, LayerName,
                                         "vkUpdateDescriptorSets: if pDescriptorWrites[%d].descriptorType is "
                                         "VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER or VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, "
                                         "pDescriptorWrites[%d].pTexelBufferView must not be NULL",
                                         i, i);
                } else {
                    for (uint32_t descriptor_index = 0; descriptor_index < pDescriptorWrites[i].descriptorCount;
                         ++descriptor_index) {
                        skip_call |= validate_required_handle(report_data, "vkUpdateDescriptorSets",
                                                              "pDescriptorWrites[i].pTexelBufferView[i]",
                                                              pDescriptorWrites[i].pTexelBufferView[descriptor_index]);
                    }
                }
            }
        }
    }

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, device)
            ->UpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
    }
}

// vkCreateFramebuffer: generated parameter checks, dispatch, result validation.
VKAPI_ATTR VkResult VKAPI_CALL CreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
                                                 const VkAllocationCallbacks *pAllocator,
                                                 VkFramebuffer *pFramebuffer) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
    bool skipCall = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(my_data != NULL);

    skipCall |= parameter_validation_vkCreateFramebuffer(my_data->report_data, pCreateInfo, pAllocator, pFramebuffer);

    if (!skipCall) {
        result = get_dispatch_table(pc_device_table_map, device)->CreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer);

        validate_result(my_data->report_data, "vkCreateFramebuffer", result);
    }

    return result;
}

// vkDestroyFramebuffer: generated parameter checks only, then dispatch.
VKAPI_ATTR void VKAPI_CALL
DestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks *pAllocator) {
    bool skipCall = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(my_data != NULL);

    skipCall |= parameter_validation_vkDestroyFramebuffer(my_data->report_data, framebuffer, pAllocator);

    if (!skipCall) {
        get_dispatch_table(pc_device_table_map, device)->DestroyFramebuffer(device, framebuffer, pAllocator);
    }
}

// vkCreateRenderPass: generated parameter checks, dispatch, result validation.
VKAPI_ATTR VkResult VKAPI_CALL CreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
                                                const VkAllocationCallbacks *pAllocator,
                                                VkRenderPass *pRenderPass) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
    bool skipCall = false;
    layer_data *my_data =
get_my_data_ptr(get_dispatch_key(device), layer_data_map); 3548 assert(my_data != NULL); 3549 3550 skipCall |= parameter_validation_vkCreateRenderPass(my_data->report_data, pCreateInfo, pAllocator, pRenderPass); 3551 3552 if (!skipCall) { 3553 result = get_dispatch_table(pc_device_table_map, device)->CreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass); 3554 3555 validate_result(my_data->report_data, "vkCreateRenderPass", result); 3556 } 3557 3558 return result; 3559} 3560 3561VKAPI_ATTR void VKAPI_CALL 3562DestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks *pAllocator) { 3563 bool skipCall = false; 3564 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 3565 assert(my_data != NULL); 3566 3567 skipCall |= parameter_validation_vkDestroyRenderPass(my_data->report_data, renderPass, pAllocator); 3568 3569 if (!skipCall) { 3570 get_dispatch_table(pc_device_table_map, device)->DestroyRenderPass(device, renderPass, pAllocator); 3571 } 3572} 3573 3574VKAPI_ATTR void VKAPI_CALL 3575GetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D *pGranularity) { 3576 bool skipCall = false; 3577 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 3578 assert(my_data != NULL); 3579 3580 skipCall |= parameter_validation_vkGetRenderAreaGranularity(my_data->report_data, renderPass, pGranularity); 3581 3582 if (!skipCall) { 3583 get_dispatch_table(pc_device_table_map, device)->GetRenderAreaGranularity(device, renderPass, pGranularity); 3584 } 3585} 3586 3587VKAPI_ATTR VkResult VKAPI_CALL CreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo, 3588 const VkAllocationCallbacks *pAllocator, 3589 VkCommandPool *pCommandPool) { 3590 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 3591 bool skipCall = false; 3592 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 3593 assert(my_data != NULL); 3594 3595 skipCall |= 
3596 validate_queue_family_index(my_data, "vkCreateCommandPool", "pCreateInfo->queueFamilyIndex", pCreateInfo->queueFamilyIndex); 3597 3598 skipCall |= parameter_validation_vkCreateCommandPool(my_data->report_data, pCreateInfo, pAllocator, pCommandPool); 3599 3600 if (!skipCall) { 3601 result = get_dispatch_table(pc_device_table_map, device)->CreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool); 3602 3603 validate_result(my_data->report_data, "vkCreateCommandPool", result); 3604 } 3605 3606 return result; 3607} 3608 3609VKAPI_ATTR void VKAPI_CALL 3610DestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks *pAllocator) { 3611 bool skipCall = false; 3612 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 3613 assert(my_data != NULL); 3614 3615 skipCall |= parameter_validation_vkDestroyCommandPool(my_data->report_data, commandPool, pAllocator); 3616 3617 if (!skipCall) { 3618 get_dispatch_table(pc_device_table_map, device)->DestroyCommandPool(device, commandPool, pAllocator); 3619 } 3620} 3621 3622VKAPI_ATTR VkResult VKAPI_CALL 3623ResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags) { 3624 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 3625 bool skipCall = false; 3626 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 3627 assert(my_data != NULL); 3628 3629 skipCall |= parameter_validation_vkResetCommandPool(my_data->report_data, commandPool, flags); 3630 3631 if (!skipCall) { 3632 result = get_dispatch_table(pc_device_table_map, device)->ResetCommandPool(device, commandPool, flags); 3633 3634 validate_result(my_data->report_data, "vkResetCommandPool", result); 3635 } 3636 3637 return result; 3638} 3639 3640VKAPI_ATTR VkResult VKAPI_CALL 3641AllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pAllocateInfo, VkCommandBuffer *pCommandBuffers) { 3642 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 
    bool skipCall = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(my_data != NULL);

    skipCall |= parameter_validation_vkAllocateCommandBuffers(my_data->report_data, pAllocateInfo, pCommandBuffers);

    if (!skipCall) {
        result = get_dispatch_table(pc_device_table_map, device)->AllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers);

        validate_result(my_data->report_data, "vkAllocateCommandBuffers", result);
    }

    return result;
}

// vkFreeCommandBuffers: generated checks plus a manual array check that permits
// VK_NULL_HANDLE elements in pCommandBuffers ('noautovalidity' in vk.xml).
VKAPI_ATTR void VKAPI_CALL FreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
                                              uint32_t commandBufferCount,
                                              const VkCommandBuffer *pCommandBuffers) {
    bool skip_call = false;
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(device_data != nullptr);
    debug_report_data *report_data = device_data->report_data;

    skip_call |= parameter_validation_vkFreeCommandBuffers(report_data, commandPool, commandBufferCount, pCommandBuffers);

    // Validation for parameters excluded from the generated validation code due to a 'noautovalidity' tag in vk.xml
    // This is an array of handles, where the elements are allowed to be VK_NULL_HANDLE, and does not require any validation beyond
    // validate_array()
    skip_call |= validate_array(report_data, "vkFreeCommandBuffers", "commandBufferCount", "pCommandBuffers", commandBufferCount,
                                pCommandBuffers, true, true);

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, device)
            ->FreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers);
    }
}

// vkBeginCommandBuffer: generated checks plus manual validation of the optional
// pInheritanceInfo structure ('noautovalidity' in vk.xml).
VKAPI_ATTR VkResult VKAPI_CALL
BeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo *pBeginInfo) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
    bool skip_call = false;
    layer_data *device_data =
get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 3685 assert(device_data != nullptr); 3686 debug_report_data *report_data = device_data->report_data; 3687 3688 skip_call |= parameter_validation_vkBeginCommandBuffer(report_data, pBeginInfo); 3689 3690 // Validation for parameters excluded from the generated validation code due to a 'noautovalidity' tag in vk.xml 3691 // TODO: pBeginInfo->pInheritanceInfo must not be NULL if commandBuffer is a secondary command buffer 3692 skip_call |= validate_struct_type(report_data, "vkBeginCommandBuffer", "pBeginInfo->pInheritanceInfo", 3693 "VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO", pBeginInfo->pInheritanceInfo, 3694 VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO, false); 3695 3696 if (pBeginInfo->pInheritanceInfo != NULL) { 3697 skip_call |= validate_struct_pnext(report_data, "vkBeginCommandBuffer", "pBeginInfo->pInheritanceInfo->pNext", NULL, 3698 pBeginInfo->pInheritanceInfo->pNext, 0, NULL); 3699 3700 skip_call |= validate_bool32(report_data, "vkBeginCommandBuffer", "pBeginInfo->pInheritanceInfo->occlusionQueryEnable", 3701 pBeginInfo->pInheritanceInfo->occlusionQueryEnable); 3702 3703 // TODO: This only needs to be validated when the inherited queries feature is enabled 3704 // skip_call |= validate_flags(report_data, "vkBeginCommandBuffer", "pBeginInfo->pInheritanceInfo->queryFlags", 3705 // "VkQueryControlFlagBits", AllVkQueryControlFlagBits, pBeginInfo->pInheritanceInfo->queryFlags, false); 3706 3707 // TODO: This must be 0 if the pipeline statistics queries feature is not enabled 3708 skip_call |= validate_flags(report_data, "vkBeginCommandBuffer", "pBeginInfo->pInheritanceInfo->pipelineStatistics", 3709 "VkQueryPipelineStatisticFlagBits", AllVkQueryPipelineStatisticFlagBits, 3710 pBeginInfo->pInheritanceInfo->pipelineStatistics, false); 3711 } 3712 3713 if (!skip_call) { 3714 result = get_dispatch_table(pc_device_table_map, commandBuffer)->BeginCommandBuffer(commandBuffer, pBeginInfo); 

        validate_result(report_data, "vkBeginCommandBuffer", result);
    }

    return result;
}

// vkEndCommandBuffer: no pointer parameters to validate; dispatch unconditionally
// and validate the returned result code.
VKAPI_ATTR VkResult VKAPI_CALL EndCommandBuffer(VkCommandBuffer commandBuffer) {
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    VkResult result = get_dispatch_table(pc_device_table_map, commandBuffer)->EndCommandBuffer(commandBuffer);

    validate_result(my_data->report_data, "vkEndCommandBuffer", result);

    return result;
}

// vkResetCommandBuffer: generated flags check, dispatch, result validation.
VKAPI_ATTR VkResult VKAPI_CALL
ResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    bool skip_call = parameter_validation_vkResetCommandBuffer(my_data->report_data, flags);

    if (!skip_call) {
        result = get_dispatch_table(pc_device_table_map, commandBuffer)->ResetCommandBuffer(commandBuffer, flags);

        validate_result(my_data->report_data, "vkResetCommandBuffer", result);
    }

    return result;
}

// vkCmdBindPipeline: generated parameter checks only, then dispatch.
VKAPI_ATTR void VKAPI_CALL
CmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) {
    bool skipCall = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skipCall |= parameter_validation_vkCmdBindPipeline(my_data->report_data, pipelineBindPoint, pipeline);

    if (!skipCall) {
        get_dispatch_table(pc_device_table_map, commandBuffer)->CmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
    }
}

// vkCmdSetViewport: generated parameter checks only, then dispatch.
VKAPI_ATTR void VKAPI_CALL
CmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport *pViewports) {
    bool skipCall = false;
    layer_data
               *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skipCall |= parameter_validation_vkCmdSetViewport(my_data->report_data, firstViewport, viewportCount, pViewports);

    if (!skipCall) {
        get_dispatch_table(pc_device_table_map, commandBuffer)
            ->CmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports);
    }
}

// vkCmdSetScissor: generated parameter checks only, then dispatch.
VKAPI_ATTR void VKAPI_CALL
CmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D *pScissors) {
    bool skipCall = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skipCall |= parameter_validation_vkCmdSetScissor(my_data->report_data, firstScissor, scissorCount, pScissors);

    if (!skipCall) {
        get_dispatch_table(pc_device_table_map, commandBuffer)->CmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors);
    }
}

// vkCmdSetLineWidth: no pointer parameters; dispatch straight through.
VKAPI_ATTR void VKAPI_CALL CmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
    get_dispatch_table(pc_device_table_map, commandBuffer)->CmdSetLineWidth(commandBuffer, lineWidth);
}

// vkCmdSetDepthBias: no pointer parameters; dispatch straight through.
VKAPI_ATTR void VKAPI_CALL
CmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor) {
    get_dispatch_table(pc_device_table_map, commandBuffer)
        ->CmdSetDepthBias(commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
}

// vkCmdSetBlendConstants: generated parameter checks only, then dispatch.
VKAPI_ATTR void VKAPI_CALL CmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
    bool skipCall = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skipCall |= parameter_validation_vkCmdSetBlendConstants(my_data->report_data, blendConstants);

    if (!skipCall) {
        get_dispatch_table(pc_device_table_map, commandBuffer)->CmdSetBlendConstants(commandBuffer, blendConstants);
    }
}

// vkCmdSetDepthBounds: no pointer parameters; dispatch straight through.
VKAPI_ATTR void VKAPI_CALL
CmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds) {
    get_dispatch_table(pc_device_table_map, commandBuffer)->CmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds);
}

// vkCmdSetStencilCompareMask: generated parameter checks only, then dispatch.
VKAPI_ATTR void VKAPI_CALL
CmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask) {
    bool skipCall = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skipCall |= parameter_validation_vkCmdSetStencilCompareMask(my_data->report_data, faceMask, compareMask);

    if (!skipCall) {
        get_dispatch_table(pc_device_table_map, commandBuffer)->CmdSetStencilCompareMask(commandBuffer, faceMask, compareMask);
    }
}

// vkCmdSetStencilWriteMask: generated parameter checks only, then dispatch.
VKAPI_ATTR void VKAPI_CALL
CmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask) {
    bool skipCall = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skipCall |= parameter_validation_vkCmdSetStencilWriteMask(my_data->report_data, faceMask, writeMask);

    if (!skipCall) {
        get_dispatch_table(pc_device_table_map, commandBuffer)->CmdSetStencilWriteMask(commandBuffer, faceMask, writeMask);
    }
}

// vkCmdSetStencilReference: generated parameter checks only, then dispatch.
VKAPI_ATTR void VKAPI_CALL
CmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference) {
    bool skipCall = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skipCall |= parameter_validation_vkCmdSetStencilReference(my_data->report_data, faceMask, reference);

    if (!skipCall) {
get_dispatch_table(pc_device_table_map, commandBuffer)->CmdSetStencilReference(commandBuffer, faceMask, reference); 3853 } 3854} 3855 3856VKAPI_ATTR void VKAPI_CALL 3857CmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, 3858 uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet *pDescriptorSets, 3859 uint32_t dynamicOffsetCount, const uint32_t *pDynamicOffsets) { 3860 bool skipCall = false; 3861 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 3862 assert(my_data != NULL); 3863 3864 skipCall |= parameter_validation_vkCmdBindDescriptorSets(my_data->report_data, pipelineBindPoint, layout, firstSet, descriptorSetCount, 3865 pDescriptorSets, dynamicOffsetCount, pDynamicOffsets); 3866 3867 if (!skipCall) { 3868 get_dispatch_table(pc_device_table_map, commandBuffer) 3869 ->CmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, 3870 dynamicOffsetCount, pDynamicOffsets); 3871 } 3872} 3873 3874VKAPI_ATTR void VKAPI_CALL 3875CmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType) { 3876 bool skipCall = false; 3877 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 3878 assert(my_data != NULL); 3879 3880 skipCall |= parameter_validation_vkCmdBindIndexBuffer(my_data->report_data, buffer, offset, indexType); 3881 3882 if (!skipCall) { 3883 get_dispatch_table(pc_device_table_map, commandBuffer)->CmdBindIndexBuffer(commandBuffer, buffer, offset, indexType); 3884 } 3885} 3886 3887VKAPI_ATTR void VKAPI_CALL CmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, 3888 uint32_t bindingCount, const VkBuffer *pBuffers, 3889 const VkDeviceSize *pOffsets) { 3890 bool skipCall = false; 3891 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 3892 assert(my_data != NULL); 
3893 3894 skipCall |= parameter_validation_vkCmdBindVertexBuffers(my_data->report_data, firstBinding, bindingCount, pBuffers, pOffsets); 3895 3896 if (!skipCall) { 3897 get_dispatch_table(pc_device_table_map, commandBuffer) 3898 ->CmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets); 3899 } 3900} 3901 3902bool PreCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, 3903 uint32_t firstInstance) { 3904 if (vertexCount == 0) { 3905 // TODO: Verify against Valid Usage section. I don't see a non-zero vertexCount listed, may need to add that and make 3906 // this an error or leave as is. 3907 log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 3908 REQUIRED_PARAMETER, "PARAMCHECK", "vkCmdDraw parameter, uint32_t vertexCount, is 0"); 3909 return false; 3910 } 3911 3912 if (instanceCount == 0) { 3913 // TODO: Verify against Valid Usage section. I don't see a non-zero instanceCount listed, may need to add that and make 3914 // this an error or leave as is. 
3915 log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 3916 REQUIRED_PARAMETER, "PARAMCHECK", "vkCmdDraw parameter, uint32_t instanceCount, is 0"); 3917 return false; 3918 } 3919 3920 return true; 3921} 3922 3923VKAPI_ATTR void VKAPI_CALL CmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, 3924 uint32_t firstVertex, uint32_t firstInstance) { 3925 PreCmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance); 3926 3927 get_dispatch_table(pc_device_table_map, commandBuffer) 3928 ->CmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance); 3929} 3930 3931VKAPI_ATTR void VKAPI_CALL CmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, 3932 uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, 3933 uint32_t firstInstance) { 3934 get_dispatch_table(pc_device_table_map, commandBuffer) 3935 ->CmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance); 3936} 3937 3938VKAPI_ATTR void VKAPI_CALL 3939CmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) { 3940 bool skipCall = false; 3941 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 3942 assert(my_data != NULL); 3943 3944 skipCall |= parameter_validation_vkCmdDrawIndirect(my_data->report_data, buffer, offset, count, stride); 3945 3946 if (!skipCall) { 3947 get_dispatch_table(pc_device_table_map, commandBuffer)->CmdDrawIndirect(commandBuffer, buffer, offset, count, stride); 3948 } 3949} 3950 3951VKAPI_ATTR void VKAPI_CALL 3952CmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) { 3953 bool skipCall = false; 3954 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 3955 assert(my_data != NULL); 3956 3957 skipCall |= 
parameter_validation_vkCmdDrawIndexedIndirect(my_data->report_data, buffer, offset, count, stride); 3958 3959 if (!skipCall) { 3960 get_dispatch_table(pc_device_table_map, commandBuffer) 3961 ->CmdDrawIndexedIndirect(commandBuffer, buffer, offset, count, stride); 3962 } 3963} 3964 3965VKAPI_ATTR void VKAPI_CALL CmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) { 3966 get_dispatch_table(pc_device_table_map, commandBuffer)->CmdDispatch(commandBuffer, x, y, z); 3967} 3968 3969VKAPI_ATTR void VKAPI_CALL 3970CmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) { 3971 bool skipCall = false; 3972 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 3973 assert(my_data != NULL); 3974 3975 skipCall |= parameter_validation_vkCmdDispatchIndirect(my_data->report_data, buffer, offset); 3976 3977 if (!skipCall) { 3978 get_dispatch_table(pc_device_table_map, commandBuffer)->CmdDispatchIndirect(commandBuffer, buffer, offset); 3979 } 3980} 3981 3982VKAPI_ATTR void VKAPI_CALL CmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, 3983 uint32_t regionCount, const VkBufferCopy *pRegions) { 3984 bool skipCall = false; 3985 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 3986 assert(my_data != NULL); 3987 3988 skipCall |= parameter_validation_vkCmdCopyBuffer(my_data->report_data, srcBuffer, dstBuffer, regionCount, pRegions); 3989 3990 if (!skipCall) { 3991 get_dispatch_table(pc_device_table_map, commandBuffer) 3992 ->CmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions); 3993 } 3994} 3995 3996bool PreCmdCopyImage(VkCommandBuffer commandBuffer, const VkImageCopy *pRegions) { 3997 if (pRegions != nullptr) { 3998 if ((pRegions->srcSubresource.aspectMask & (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | 3999 VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) == 0) { 4000 
log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 4001 UNRECOGNIZED_VALUE, "PARAMCHECK", 4002 "vkCmdCopyImage parameter, VkImageAspect pRegions->srcSubresource.aspectMask, is an unrecognized enumerator"); 4003 return false; 4004 } 4005 if ((pRegions->dstSubresource.aspectMask & (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | 4006 VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) == 0) { 4007 log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 4008 UNRECOGNIZED_VALUE, "PARAMCHECK", 4009 "vkCmdCopyImage parameter, VkImageAspect pRegions->dstSubresource.aspectMask, is an unrecognized enumerator"); 4010 return false; 4011 } 4012 } 4013 4014 return true; 4015} 4016 4017VKAPI_ATTR void VKAPI_CALL 4018CmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, 4019 VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy *pRegions) { 4020 bool skipCall = false; 4021 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 4022 assert(my_data != NULL); 4023 4024 skipCall |= 4025 parameter_validation_vkCmdCopyImage(my_data->report_data, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions); 4026 4027 if (!skipCall) { 4028 PreCmdCopyImage(commandBuffer, pRegions); 4029 4030 get_dispatch_table(pc_device_table_map, commandBuffer) 4031 ->CmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions); 4032 } 4033} 4034 4035bool PreCmdBlitImage(VkCommandBuffer commandBuffer, const VkImageBlit *pRegions) { 4036 if (pRegions != nullptr) { 4037 if ((pRegions->srcSubresource.aspectMask & (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | 4038 VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) == 0) { 4039 log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, 
VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 4040 UNRECOGNIZED_VALUE, "PARAMCHECK", 4041 "vkCmdCopyImage parameter, VkImageAspect pRegions->srcSubresource.aspectMask, is an unrecognized enumerator"); 4042 return false; 4043 } 4044 if ((pRegions->dstSubresource.aspectMask & (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | 4045 VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) == 0) { 4046 log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 4047 UNRECOGNIZED_VALUE, "PARAMCHECK", 4048 "vkCmdCopyImage parameter, VkImageAspect pRegions->dstSubresource.aspectMask, is an unrecognized enumerator"); 4049 return false; 4050 } 4051 } 4052 4053 return true; 4054} 4055 4056VKAPI_ATTR void VKAPI_CALL 4057CmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, 4058 VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter) { 4059 bool skipCall = false; 4060 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 4061 assert(my_data != NULL); 4062 4063 skipCall |= parameter_validation_vkCmdBlitImage(my_data->report_data, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, 4064 pRegions, filter); 4065 4066 if (!skipCall) { 4067 PreCmdBlitImage(commandBuffer, pRegions); 4068 4069 get_dispatch_table(pc_device_table_map, commandBuffer) 4070 ->CmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter); 4071 } 4072} 4073 4074bool PreCmdCopyBufferToImage(VkCommandBuffer commandBuffer, const VkBufferImageCopy *pRegions) { 4075 if (pRegions != nullptr) { 4076 if ((pRegions->imageSubresource.aspectMask & (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | 4077 VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) == 0) { 4078 log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, 
VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 4079 UNRECOGNIZED_VALUE, "PARAMCHECK", 4080 "vkCmdCopyBufferToImage parameter, VkImageAspect pRegions->imageSubresource.aspectMask, is an unrecognized " 4081 "enumerator"); 4082 return false; 4083 } 4084 } 4085 4086 return true; 4087} 4088 4089VKAPI_ATTR void VKAPI_CALL CmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, 4090 VkImage dstImage, VkImageLayout dstImageLayout, 4091 uint32_t regionCount, const VkBufferImageCopy *pRegions) { 4092 bool skipCall = false; 4093 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 4094 assert(my_data != NULL); 4095 4096 skipCall |= 4097 parameter_validation_vkCmdCopyBufferToImage(my_data->report_data, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions); 4098 4099 if (!skipCall) { 4100 PreCmdCopyBufferToImage(commandBuffer, pRegions); 4101 4102 get_dispatch_table(pc_device_table_map, commandBuffer) 4103 ->CmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions); 4104 } 4105} 4106 4107bool PreCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, const VkBufferImageCopy *pRegions) { 4108 if (pRegions != nullptr) { 4109 if ((pRegions->imageSubresource.aspectMask & (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | 4110 VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) == 0) { 4111 log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 4112 UNRECOGNIZED_VALUE, "PARAMCHECK", 4113 "vkCmdCopyImageToBuffer parameter, VkImageAspect pRegions->imageSubresource.aspectMask, is an unrecognized " 4114 "enumerator"); 4115 return false; 4116 } 4117 } 4118 4119 return true; 4120} 4121 4122VKAPI_ATTR void VKAPI_CALL CmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, 4123 VkImageLayout srcImageLayout, VkBuffer dstBuffer, 4124 uint32_t regionCount, const VkBufferImageCopy *pRegions) { 4125 bool skipCall 
= false; 4126 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 4127 assert(my_data != NULL); 4128 4129 skipCall |= 4130 parameter_validation_vkCmdCopyImageToBuffer(my_data->report_data, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions); 4131 4132 if (!skipCall) { 4133 PreCmdCopyImageToBuffer(commandBuffer, pRegions); 4134 4135 get_dispatch_table(pc_device_table_map, commandBuffer) 4136 ->CmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions); 4137 } 4138} 4139 4140VKAPI_ATTR void VKAPI_CALL CmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, 4141 VkDeviceSize dataSize, const uint32_t *pData) { 4142 bool skip_call = false; 4143 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 4144 assert(my_data != NULL); 4145 4146 skip_call |= parameter_validation_vkCmdUpdateBuffer(my_data->report_data, dstBuffer, dstOffset, dataSize, pData); 4147 4148 if (dstOffset & 3) { 4149 skip_call |= 4150 log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VkDebugReportObjectTypeEXT(0), 0, __LINE__, INVALID_USAGE, 4151 "PARAMCHECK", "CmdUpdateBuffer parameter, VkDeviceSize dstOffset (0x%" PRIxLEAST64 "), is not a multiple of 4", 4152 dstOffset); 4153 } 4154 4155 if ((dataSize <= 0) || (dataSize > 65536)) { 4156 skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VkDebugReportObjectTypeEXT(0), 0, __LINE__, 4157 INVALID_USAGE, "PARAMCHECK", "CmdUpdateBuffer parameter, VkDeviceSize dataSize (0x%" PRIxLEAST64 4158 "), must be greater than zero and less than or equal to 65536", 4159 dataSize); 4160 } else if (dataSize & 3) { 4161 skip_call |= 4162 log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VkDebugReportObjectTypeEXT(0), 0, __LINE__, INVALID_USAGE, 4163 "PARAMCHECK", "CmdUpdateBuffer parameter, VkDeviceSize dataSize (0x%" PRIxLEAST64 "), is not a multiple of 4", 4164 dataSize); 4165 } 
4166 4167 if (!skip_call) { 4168 get_dispatch_table(pc_device_table_map, commandBuffer) 4169 ->CmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData); 4170 } 4171} 4172 4173VKAPI_ATTR void VKAPI_CALL CmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, 4174 VkDeviceSize size, uint32_t data) { 4175 bool skip_call = false; 4176 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 4177 assert(my_data != NULL); 4178 4179 skip_call |= parameter_validation_vkCmdFillBuffer(my_data->report_data, dstBuffer, dstOffset, size, data); 4180 4181 if (dstOffset & 3) { 4182 skip_call |= log_msg( 4183 my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VkDebugReportObjectTypeEXT(0), 0, __LINE__, INVALID_USAGE, "DL", 4184 "vkCmdFillBuffer parameter, VkDeviceSize dstOffset (0x%" PRIxLEAST64 "), is not a multiple of 4", dstOffset); 4185 } 4186 4187 if (size != VK_WHOLE_SIZE) { 4188 if (size <= 0) { 4189 skip_call |= log_msg( 4190 my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VkDebugReportObjectTypeEXT(0), 0, __LINE__, INVALID_USAGE, 4191 "DL", "vkCmdFillBuffer parameter, VkDeviceSize size (0x%" PRIxLEAST64 "), must be greater than zero", size); 4192 } else if (size & 3) { 4193 skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VkDebugReportObjectTypeEXT(0), 0, __LINE__, 4194 INVALID_USAGE, "DL", 4195 "vkCmdFillBuffer parameter, VkDeviceSize size (0x%" PRIxLEAST64 "), is not a multiple of 4", size); 4196 } 4197 } 4198 4199 if (!skip_call) { 4200 get_dispatch_table(pc_device_table_map, commandBuffer)->CmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data); 4201 } 4202} 4203 4204VKAPI_ATTR void VKAPI_CALL CmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, 4205 VkImageLayout imageLayout, const VkClearColorValue *pColor, 4206 uint32_t rangeCount, const VkImageSubresourceRange *pRanges) { 4207 bool skipCall = false; 4208 layer_data *my_data = 
get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 4209 assert(my_data != NULL); 4210 4211 skipCall |= parameter_validation_vkCmdClearColorImage(my_data->report_data, image, imageLayout, pColor, rangeCount, pRanges); 4212 4213 if (!skipCall) { 4214 get_dispatch_table(pc_device_table_map, commandBuffer) 4215 ->CmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges); 4216 } 4217} 4218 4219VKAPI_ATTR void VKAPI_CALL 4220CmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, 4221 const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount, 4222 const VkImageSubresourceRange *pRanges) { 4223 bool skipCall = false; 4224 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 4225 assert(my_data != NULL); 4226 4227 skipCall |= 4228 parameter_validation_vkCmdClearDepthStencilImage(my_data->report_data, image, imageLayout, pDepthStencil, rangeCount, pRanges); 4229 4230 if (!skipCall) { 4231 get_dispatch_table(pc_device_table_map, commandBuffer) 4232 ->CmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges); 4233 } 4234} 4235 4236VKAPI_ATTR void VKAPI_CALL CmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount, 4237 const VkClearAttachment *pAttachments, uint32_t rectCount, 4238 const VkClearRect *pRects) { 4239 bool skipCall = false; 4240 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 4241 assert(my_data != NULL); 4242 4243 skipCall |= parameter_validation_vkCmdClearAttachments(my_data->report_data, attachmentCount, pAttachments, rectCount, pRects); 4244 4245 if (!skipCall) { 4246 get_dispatch_table(pc_device_table_map, commandBuffer) 4247 ->CmdClearAttachments(commandBuffer, attachmentCount, pAttachments, rectCount, pRects); 4248 } 4249} 4250 4251bool PreCmdResolveImage(VkCommandBuffer commandBuffer, const VkImageResolve *pRegions) { 4252 if 
(pRegions != nullptr) { 4253 if ((pRegions->srcSubresource.aspectMask & (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | 4254 VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) == 0) { 4255 log_msg( 4256 mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 4257 UNRECOGNIZED_VALUE, "PARAMCHECK", 4258 "vkCmdResolveImage parameter, VkImageAspect pRegions->srcSubresource.aspectMask, is an unrecognized enumerator"); 4259 return false; 4260 } 4261 if ((pRegions->dstSubresource.aspectMask & (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | 4262 VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) == 0) { 4263 log_msg( 4264 mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 4265 UNRECOGNIZED_VALUE, "PARAMCHECK", 4266 "vkCmdResolveImage parameter, VkImageAspect pRegions->dstSubresource.aspectMask, is an unrecognized enumerator"); 4267 return false; 4268 } 4269 } 4270 4271 return true; 4272} 4273 4274VKAPI_ATTR void VKAPI_CALL 4275CmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, 4276 VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve *pRegions) { 4277 bool skipCall = false; 4278 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 4279 assert(my_data != NULL); 4280 4281 skipCall |= parameter_validation_vkCmdResolveImage(my_data->report_data, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, 4282 pRegions); 4283 4284 if (!skipCall) { 4285 PreCmdResolveImage(commandBuffer, pRegions); 4286 4287 get_dispatch_table(pc_device_table_map, commandBuffer) 4288 ->CmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions); 4289 } 4290} 4291 4292VKAPI_ATTR void VKAPI_CALL 4293CmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) { 4294 
bool skipCall = false; 4295 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 4296 assert(my_data != NULL); 4297 4298 skipCall |= parameter_validation_vkCmdSetEvent(my_data->report_data, event, stageMask); 4299 4300 if (!skipCall) { 4301 get_dispatch_table(pc_device_table_map, commandBuffer)->CmdSetEvent(commandBuffer, event, stageMask); 4302 } 4303} 4304 4305VKAPI_ATTR void VKAPI_CALL 4306CmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) { 4307 bool skipCall = false; 4308 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 4309 assert(my_data != NULL); 4310 4311 skipCall |= parameter_validation_vkCmdResetEvent(my_data->report_data, event, stageMask); 4312 4313 if (!skipCall) { 4314 get_dispatch_table(pc_device_table_map, commandBuffer)->CmdResetEvent(commandBuffer, event, stageMask); 4315 } 4316} 4317 4318VKAPI_ATTR void VKAPI_CALL 4319CmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents, VkPipelineStageFlags srcStageMask, 4320 VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, 4321 uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers, 4322 uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) { 4323 bool skipCall = false; 4324 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 4325 assert(my_data != NULL); 4326 4327 skipCall |= parameter_validation_vkCmdWaitEvents(my_data->report_data, eventCount, pEvents, srcStageMask, dstStageMask, 4328 memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, 4329 imageMemoryBarrierCount, pImageMemoryBarriers); 4330 4331 if (!skipCall) { 4332 get_dispatch_table(pc_device_table_map, commandBuffer) 4333 ->CmdWaitEvents(commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, 
pMemoryBarriers, 4334 bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers); 4335 } 4336} 4337 4338VKAPI_ATTR void VKAPI_CALL 4339CmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, 4340 VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, 4341 uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers, 4342 uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) { 4343 bool skipCall = false; 4344 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 4345 assert(my_data != NULL); 4346 4347 skipCall |= parameter_validation_vkCmdPipelineBarrier(my_data->report_data, srcStageMask, dstStageMask, dependencyFlags, 4348 memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, 4349 pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers); 4350 4351 if (!skipCall) { 4352 get_dispatch_table(pc_device_table_map, commandBuffer) 4353 ->CmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, 4354 bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers); 4355 } 4356} 4357 4358VKAPI_ATTR void VKAPI_CALL 4359CmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot, VkQueryControlFlags flags) { 4360 bool skipCall = false; 4361 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 4362 assert(my_data != NULL); 4363 4364 skipCall |= parameter_validation_vkCmdBeginQuery(my_data->report_data, queryPool, slot, flags); 4365 4366 if (!skipCall) { 4367 get_dispatch_table(pc_device_table_map, commandBuffer)->CmdBeginQuery(commandBuffer, queryPool, slot, flags); 4368 } 4369} 4370 4371VKAPI_ATTR void VKAPI_CALL CmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool 
queryPool, uint32_t slot) { 4372 bool skipCall = false; 4373 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 4374 assert(my_data != NULL); 4375 4376 skipCall |= parameter_validation_vkCmdEndQuery(my_data->report_data, queryPool, slot); 4377 4378 if (!skipCall) { 4379 get_dispatch_table(pc_device_table_map, commandBuffer)->CmdEndQuery(commandBuffer, queryPool, slot); 4380 } 4381} 4382 4383VKAPI_ATTR void VKAPI_CALL 4384CmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) { 4385 bool skipCall = false; 4386 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 4387 assert(my_data != NULL); 4388 4389 skipCall |= parameter_validation_vkCmdResetQueryPool(my_data->report_data, queryPool, firstQuery, queryCount); 4390 4391 if (!skipCall) { 4392 get_dispatch_table(pc_device_table_map, commandBuffer)->CmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount); 4393 } 4394} 4395 4396bool PostCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, 4397 uint32_t slot) { 4398 4399 ValidateEnumerator(pipelineStage); 4400 4401 return true; 4402} 4403 4404VKAPI_ATTR void VKAPI_CALL CmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, 4405 VkQueryPool queryPool, uint32_t query) { 4406 bool skipCall = false; 4407 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 4408 assert(my_data != NULL); 4409 4410 skipCall |= parameter_validation_vkCmdWriteTimestamp(my_data->report_data, pipelineStage, queryPool, query); 4411 4412 if (!skipCall) { 4413 get_dispatch_table(pc_device_table_map, commandBuffer)->CmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, query); 4414 4415 PostCmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, query); 4416 } 4417} 4418 4419VKAPI_ATTR void VKAPI_CALL 
4420CmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, 4421 VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags) { 4422 bool skipCall = false; 4423 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 4424 assert(my_data != NULL); 4425 4426 skipCall |= parameter_validation_vkCmdCopyQueryPoolResults(my_data->report_data, queryPool, firstQuery, queryCount, dstBuffer, 4427 dstOffset, stride, flags); 4428 4429 if (!skipCall) { 4430 get_dispatch_table(pc_device_table_map, commandBuffer) 4431 ->CmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags); 4432 } 4433} 4434 4435VKAPI_ATTR void VKAPI_CALL CmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout, 4436 VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, 4437 const void *pValues) { 4438 bool skipCall = false; 4439 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 4440 assert(my_data != NULL); 4441 4442 skipCall |= parameter_validation_vkCmdPushConstants(my_data->report_data, layout, stageFlags, offset, size, pValues); 4443 4444 if (!skipCall) { 4445 get_dispatch_table(pc_device_table_map, commandBuffer) 4446 ->CmdPushConstants(commandBuffer, layout, stageFlags, offset, size, pValues); 4447 } 4448} 4449 4450VKAPI_ATTR void VKAPI_CALL 4451CmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin, VkSubpassContents contents) { 4452 bool skipCall = false; 4453 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 4454 assert(my_data != NULL); 4455 4456 skipCall |= parameter_validation_vkCmdBeginRenderPass(my_data->report_data, pRenderPassBegin, contents); 4457 4458 if (!skipCall) { 4459 get_dispatch_table(pc_device_table_map, commandBuffer)->CmdBeginRenderPass(commandBuffer, 
pRenderPassBegin, contents); 4460 } 4461} 4462 4463VKAPI_ATTR void VKAPI_CALL CmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) { 4464 bool skipCall = false; 4465 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 4466 assert(my_data != NULL); 4467 4468 skipCall |= parameter_validation_vkCmdNextSubpass(my_data->report_data, contents); 4469 4470 if (!skipCall) { 4471 get_dispatch_table(pc_device_table_map, commandBuffer)->CmdNextSubpass(commandBuffer, contents); 4472 } 4473} 4474 4475VKAPI_ATTR void VKAPI_CALL CmdEndRenderPass(VkCommandBuffer commandBuffer) { 4476 get_dispatch_table(pc_device_table_map, commandBuffer)->CmdEndRenderPass(commandBuffer); 4477} 4478 4479VKAPI_ATTR void VKAPI_CALL 4480CmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) { 4481 bool skipCall = false; 4482 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 4483 assert(my_data != NULL); 4484 4485 skipCall |= parameter_validation_vkCmdExecuteCommands(my_data->report_data, commandBufferCount, pCommandBuffers); 4486 4487 if (!skipCall) { 4488 get_dispatch_table(pc_device_table_map, commandBuffer) 4489 ->CmdExecuteCommands(commandBuffer, commandBufferCount, pCommandBuffers); 4490 } 4491} 4492 4493VKAPI_ATTR VkResult VKAPI_CALL 4494EnumerateInstanceLayerProperties(uint32_t *pCount, VkLayerProperties *pProperties) { 4495 return util_GetLayerProperties(1, &global_layer, pCount, pProperties); 4496} 4497 4498VKAPI_ATTR VkResult VKAPI_CALL 4499EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount, VkLayerProperties *pProperties) { 4500 return util_GetLayerProperties(1, &global_layer, pCount, pProperties); 4501} 4502 4503VKAPI_ATTR VkResult VKAPI_CALL 4504EnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount, VkExtensionProperties *pProperties) { 4505 if (pLayerName && !strcmp(pLayerName, 
global_layer.layerName)) 4506 return util_GetExtensionProperties(1, instance_extensions, pCount, pProperties); 4507 4508 return VK_ERROR_LAYER_NOT_PRESENT; 4509} 4510 4511VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, 4512 const char *pLayerName, uint32_t *pCount, 4513 VkExtensionProperties *pProperties) { 4514 /* parameter_validation does not have any physical device extensions */ 4515 if (pLayerName && !strcmp(pLayerName, global_layer.layerName)) 4516 return util_GetExtensionProperties(0, NULL, pCount, pProperties); 4517 4518 assert(physicalDevice); 4519 4520 return get_dispatch_table(pc_instance_table_map, physicalDevice) 4521 ->EnumerateDeviceExtensionProperties(physicalDevice, NULL, pCount, pProperties); 4522} 4523 4524static PFN_vkVoidFunction 4525intercept_core_instance_command(const char *name); 4526 4527static PFN_vkVoidFunction 4528intercept_core_device_command(const char *name); 4529 4530VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(VkDevice device, const char *funcName) { 4531 assert(device); 4532 4533 layer_data *data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 4534 4535 if (validate_string(data->report_data, "vkGetDeviceProcAddr", "funcName", funcName)) { 4536 return NULL; 4537 } 4538 4539 PFN_vkVoidFunction proc = intercept_core_device_command(funcName); 4540 if (proc) 4541 return proc; 4542 4543 if (get_dispatch_table(pc_device_table_map, device)->GetDeviceProcAddr == NULL) 4544 return NULL; 4545 return get_dispatch_table(pc_device_table_map, device)->GetDeviceProcAddr(device, funcName); 4546} 4547 4548VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(VkInstance instance, const char *funcName) { 4549 PFN_vkVoidFunction proc = intercept_core_instance_command(funcName); 4550 if (!proc) 4551 proc = intercept_core_device_command(funcName); 4552 if (proc) 4553 return proc; 4554 4555 assert(instance); 4556 4557 layer_data *data = 
get_my_data_ptr(get_dispatch_key(instance), layer_data_map); 4558 4559 proc = debug_report_get_instance_proc_addr(data->report_data, funcName); 4560 if (proc) 4561 return proc; 4562 4563 if (get_dispatch_table(pc_instance_table_map, instance)->GetInstanceProcAddr == NULL) 4564 return NULL; 4565 return get_dispatch_table(pc_instance_table_map, instance)->GetInstanceProcAddr(instance, funcName); 4566} 4567 4568static PFN_vkVoidFunction 4569intercept_core_instance_command(const char *name) { 4570 static const struct { 4571 const char *name; 4572 PFN_vkVoidFunction proc; 4573 } core_instance_commands[] = { 4574 { "vkGetInstanceProcAddr", reinterpret_cast<PFN_vkVoidFunction>(GetInstanceProcAddr) }, 4575 { "vkCreateInstance", reinterpret_cast<PFN_vkVoidFunction>(CreateInstance) }, 4576 { "vkDestroyInstance", reinterpret_cast<PFN_vkVoidFunction>(DestroyInstance) }, 4577 { "vkCreateDevice", reinterpret_cast<PFN_vkVoidFunction>(CreateDevice) }, 4578 { "vkEnumeratePhysicalDevices", reinterpret_cast<PFN_vkVoidFunction>(EnumeratePhysicalDevices) }, 4579 { "vkGetPhysicalDeviceProperties", reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceProperties) }, 4580 { "vkGetPhysicalDeviceFeatures", reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceFeatures) }, 4581 { "vkGetPhysicalDeviceFormatProperties", reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceFormatProperties) }, 4582 { "vkGetPhysicalDeviceImageFormatProperties", reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceImageFormatProperties) }, 4583 { "vkGetPhysicalDeviceSparseImageFormatProperties", reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSparseImageFormatProperties) }, 4584 { "vkGetPhysicalDeviceQueueFamilyProperties", reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceQueueFamilyProperties) }, 4585 { "vkGetPhysicalDeviceMemoryProperties", reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceMemoryProperties) }, 4586 { "vkEnumerateInstanceLayerProperties", 
reinterpret_cast<PFN_vkVoidFunction>(EnumerateInstanceLayerProperties) }, 4587 { "vkEnumerateDeviceLayerProperties", reinterpret_cast<PFN_vkVoidFunction>(EnumerateDeviceLayerProperties) }, 4588 { "vkEnumerateInstanceExtensionProperties", reinterpret_cast<PFN_vkVoidFunction>(EnumerateInstanceExtensionProperties) }, 4589 { "vkEnumerateDeviceExtensionProperties", reinterpret_cast<PFN_vkVoidFunction>(EnumerateDeviceExtensionProperties) }, 4590 }; 4591 4592 for (size_t i = 0; i < ARRAY_SIZE(core_instance_commands); i++) { 4593 if (!strcmp(core_instance_commands[i].name, name)) 4594 return core_instance_commands[i].proc; 4595 } 4596 4597 return nullptr; 4598} 4599 4600static PFN_vkVoidFunction 4601intercept_core_device_command(const char *name) { 4602 static const struct { 4603 const char *name; 4604 PFN_vkVoidFunction proc; 4605 } core_device_commands[] = { 4606 { "vkGetDeviceProcAddr", reinterpret_cast<PFN_vkVoidFunction>(GetDeviceProcAddr) }, 4607 { "vkDestroyDevice", reinterpret_cast<PFN_vkVoidFunction>(DestroyDevice) }, 4608 { "vkGetDeviceQueue", reinterpret_cast<PFN_vkVoidFunction>(GetDeviceQueue) }, 4609 { "vkQueueSubmit", reinterpret_cast<PFN_vkVoidFunction>(QueueSubmit) }, 4610 { "vkQueueWaitIdle", reinterpret_cast<PFN_vkVoidFunction>(QueueWaitIdle) }, 4611 { "vkDeviceWaitIdle", reinterpret_cast<PFN_vkVoidFunction>(DeviceWaitIdle) }, 4612 { "vkAllocateMemory", reinterpret_cast<PFN_vkVoidFunction>(AllocateMemory) }, 4613 { "vkFreeMemory", reinterpret_cast<PFN_vkVoidFunction>(FreeMemory) }, 4614 { "vkMapMemory", reinterpret_cast<PFN_vkVoidFunction>(MapMemory) }, 4615 { "vkUnmapMemory", reinterpret_cast<PFN_vkVoidFunction>(UnmapMemory) }, 4616 { "vkFlushMappedMemoryRanges", reinterpret_cast<PFN_vkVoidFunction>(FlushMappedMemoryRanges) }, 4617 { "vkInvalidateMappedMemoryRanges", reinterpret_cast<PFN_vkVoidFunction>(InvalidateMappedMemoryRanges) }, 4618 { "vkGetDeviceMemoryCommitment", reinterpret_cast<PFN_vkVoidFunction>(GetDeviceMemoryCommitment) }, 4619 { 
"vkBindBufferMemory", reinterpret_cast<PFN_vkVoidFunction>(BindBufferMemory) }, 4620 { "vkBindImageMemory", reinterpret_cast<PFN_vkVoidFunction>(BindImageMemory) }, 4621 { "vkCreateFence", reinterpret_cast<PFN_vkVoidFunction>(CreateFence) }, 4622 { "vkDestroyFence", reinterpret_cast<PFN_vkVoidFunction>(DestroyFence) }, 4623 { "vkResetFences", reinterpret_cast<PFN_vkVoidFunction>(ResetFences) }, 4624 { "vkGetFenceStatus", reinterpret_cast<PFN_vkVoidFunction>(GetFenceStatus) }, 4625 { "vkWaitForFences", reinterpret_cast<PFN_vkVoidFunction>(WaitForFences) }, 4626 { "vkCreateSemaphore", reinterpret_cast<PFN_vkVoidFunction>(CreateSemaphore) }, 4627 { "vkDestroySemaphore", reinterpret_cast<PFN_vkVoidFunction>(DestroySemaphore) }, 4628 { "vkCreateEvent", reinterpret_cast<PFN_vkVoidFunction>(CreateEvent) }, 4629 { "vkDestroyEvent", reinterpret_cast<PFN_vkVoidFunction>(DestroyEvent) }, 4630 { "vkGetEventStatus", reinterpret_cast<PFN_vkVoidFunction>(GetEventStatus) }, 4631 { "vkSetEvent", reinterpret_cast<PFN_vkVoidFunction>(SetEvent) }, 4632 { "vkResetEvent", reinterpret_cast<PFN_vkVoidFunction>(ResetEvent) }, 4633 { "vkCreateQueryPool", reinterpret_cast<PFN_vkVoidFunction>(CreateQueryPool) }, 4634 { "vkDestroyQueryPool", reinterpret_cast<PFN_vkVoidFunction>(DestroyQueryPool) }, 4635 { "vkGetQueryPoolResults", reinterpret_cast<PFN_vkVoidFunction>(GetQueryPoolResults) }, 4636 { "vkCreateBuffer", reinterpret_cast<PFN_vkVoidFunction>(CreateBuffer) }, 4637 { "vkDestroyBuffer", reinterpret_cast<PFN_vkVoidFunction>(DestroyBuffer) }, 4638 { "vkCreateBufferView", reinterpret_cast<PFN_vkVoidFunction>(CreateBufferView) }, 4639 { "vkDestroyBufferView", reinterpret_cast<PFN_vkVoidFunction>(DestroyBufferView) }, 4640 { "vkCreateImage", reinterpret_cast<PFN_vkVoidFunction>(CreateImage) }, 4641 { "vkDestroyImage", reinterpret_cast<PFN_vkVoidFunction>(DestroyImage) }, 4642 { "vkGetImageSubresourceLayout", reinterpret_cast<PFN_vkVoidFunction>(GetImageSubresourceLayout) }, 4643 { 
"vkCreateImageView", reinterpret_cast<PFN_vkVoidFunction>(CreateImageView) }, 4644 { "vkDestroyImageView", reinterpret_cast<PFN_vkVoidFunction>(DestroyImageView) }, 4645 { "vkCreateShaderModule", reinterpret_cast<PFN_vkVoidFunction>(CreateShaderModule) }, 4646 { "vkDestroyShaderModule", reinterpret_cast<PFN_vkVoidFunction>(DestroyShaderModule) }, 4647 { "vkCreatePipelineCache", reinterpret_cast<PFN_vkVoidFunction>(CreatePipelineCache) }, 4648 { "vkDestroyPipelineCache", reinterpret_cast<PFN_vkVoidFunction>(DestroyPipelineCache) }, 4649 { "vkGetPipelineCacheData", reinterpret_cast<PFN_vkVoidFunction>(GetPipelineCacheData) }, 4650 { "vkMergePipelineCaches", reinterpret_cast<PFN_vkVoidFunction>(MergePipelineCaches) }, 4651 { "vkCreateGraphicsPipelines", reinterpret_cast<PFN_vkVoidFunction>(CreateGraphicsPipelines) }, 4652 { "vkCreateComputePipelines", reinterpret_cast<PFN_vkVoidFunction>(CreateComputePipelines) }, 4653 { "vkDestroyPipeline", reinterpret_cast<PFN_vkVoidFunction>(DestroyPipeline) }, 4654 { "vkCreatePipelineLayout", reinterpret_cast<PFN_vkVoidFunction>(CreatePipelineLayout) }, 4655 { "vkDestroyPipelineLayout", reinterpret_cast<PFN_vkVoidFunction>(DestroyPipelineLayout) }, 4656 { "vkCreateSampler", reinterpret_cast<PFN_vkVoidFunction>(CreateSampler) }, 4657 { "vkDestroySampler", reinterpret_cast<PFN_vkVoidFunction>(DestroySampler) }, 4658 { "vkCreateDescriptorSetLayout", reinterpret_cast<PFN_vkVoidFunction>(CreateDescriptorSetLayout) }, 4659 { "vkDestroyDescriptorSetLayout", reinterpret_cast<PFN_vkVoidFunction>(DestroyDescriptorSetLayout) }, 4660 { "vkCreateDescriptorPool", reinterpret_cast<PFN_vkVoidFunction>(CreateDescriptorPool) }, 4661 { "vkDestroyDescriptorPool", reinterpret_cast<PFN_vkVoidFunction>(DestroyDescriptorPool) }, 4662 { "vkResetDescriptorPool", reinterpret_cast<PFN_vkVoidFunction>(ResetDescriptorPool) }, 4663 { "vkAllocateDescriptorSets", reinterpret_cast<PFN_vkVoidFunction>(AllocateDescriptorSets) }, 4664 { "vkFreeDescriptorSets", 
reinterpret_cast<PFN_vkVoidFunction>(FreeDescriptorSets) }, 4665 { "vkUpdateDescriptorSets", reinterpret_cast<PFN_vkVoidFunction>(UpdateDescriptorSets) }, 4666 { "vkCmdSetViewport", reinterpret_cast<PFN_vkVoidFunction>(CmdSetViewport) }, 4667 { "vkCmdSetScissor", reinterpret_cast<PFN_vkVoidFunction>(CmdSetScissor) }, 4668 { "vkCmdSetLineWidth", reinterpret_cast<PFN_vkVoidFunction>(CmdSetLineWidth) }, 4669 { "vkCmdSetDepthBias", reinterpret_cast<PFN_vkVoidFunction>(CmdSetDepthBias) }, 4670 { "vkCmdSetBlendConstants", reinterpret_cast<PFN_vkVoidFunction>(CmdSetBlendConstants) }, 4671 { "vkCmdSetDepthBounds", reinterpret_cast<PFN_vkVoidFunction>(CmdSetDepthBounds) }, 4672 { "vkCmdSetStencilCompareMask", reinterpret_cast<PFN_vkVoidFunction>(CmdSetStencilCompareMask) }, 4673 { "vkCmdSetStencilWriteMask", reinterpret_cast<PFN_vkVoidFunction>(CmdSetStencilWriteMask) }, 4674 { "vkCmdSetStencilReference", reinterpret_cast<PFN_vkVoidFunction>(CmdSetStencilReference) }, 4675 { "vkAllocateCommandBuffers", reinterpret_cast<PFN_vkVoidFunction>(AllocateCommandBuffers) }, 4676 { "vkFreeCommandBuffers", reinterpret_cast<PFN_vkVoidFunction>(FreeCommandBuffers) }, 4677 { "vkBeginCommandBuffer", reinterpret_cast<PFN_vkVoidFunction>(BeginCommandBuffer) }, 4678 { "vkEndCommandBuffer", reinterpret_cast<PFN_vkVoidFunction>(EndCommandBuffer) }, 4679 { "vkResetCommandBuffer", reinterpret_cast<PFN_vkVoidFunction>(ResetCommandBuffer) }, 4680 { "vkCmdBindPipeline", reinterpret_cast<PFN_vkVoidFunction>(CmdBindPipeline) }, 4681 { "vkCmdBindDescriptorSets", reinterpret_cast<PFN_vkVoidFunction>(CmdBindDescriptorSets) }, 4682 { "vkCmdBindVertexBuffers", reinterpret_cast<PFN_vkVoidFunction>(CmdBindVertexBuffers) }, 4683 { "vkCmdBindIndexBuffer", reinterpret_cast<PFN_vkVoidFunction>(CmdBindIndexBuffer) }, 4684 { "vkCmdDraw", reinterpret_cast<PFN_vkVoidFunction>(CmdDraw) }, 4685 { "vkCmdDrawIndexed", reinterpret_cast<PFN_vkVoidFunction>(CmdDrawIndexed) }, 4686 { "vkCmdDrawIndirect", 
reinterpret_cast<PFN_vkVoidFunction>(CmdDrawIndirect) }, 4687 { "vkCmdDrawIndexedIndirect", reinterpret_cast<PFN_vkVoidFunction>(CmdDrawIndexedIndirect) }, 4688 { "vkCmdDispatch", reinterpret_cast<PFN_vkVoidFunction>(CmdDispatch) }, 4689 { "vkCmdDispatchIndirect", reinterpret_cast<PFN_vkVoidFunction>(CmdDispatchIndirect) }, 4690 { "vkCmdCopyBuffer", reinterpret_cast<PFN_vkVoidFunction>(CmdCopyBuffer) }, 4691 { "vkCmdCopyImage", reinterpret_cast<PFN_vkVoidFunction>(CmdCopyImage) }, 4692 { "vkCmdBlitImage", reinterpret_cast<PFN_vkVoidFunction>(CmdBlitImage) }, 4693 { "vkCmdCopyBufferToImage", reinterpret_cast<PFN_vkVoidFunction>(CmdCopyBufferToImage) }, 4694 { "vkCmdCopyImageToBuffer", reinterpret_cast<PFN_vkVoidFunction>(CmdCopyImageToBuffer) }, 4695 { "vkCmdUpdateBuffer", reinterpret_cast<PFN_vkVoidFunction>(CmdUpdateBuffer) }, 4696 { "vkCmdFillBuffer", reinterpret_cast<PFN_vkVoidFunction>(CmdFillBuffer) }, 4697 { "vkCmdClearColorImage", reinterpret_cast<PFN_vkVoidFunction>(CmdClearColorImage) }, 4698 { "vkCmdResolveImage", reinterpret_cast<PFN_vkVoidFunction>(CmdResolveImage) }, 4699 { "vkCmdSetEvent", reinterpret_cast<PFN_vkVoidFunction>(CmdSetEvent) }, 4700 { "vkCmdResetEvent", reinterpret_cast<PFN_vkVoidFunction>(CmdResetEvent) }, 4701 { "vkCmdWaitEvents", reinterpret_cast<PFN_vkVoidFunction>(CmdWaitEvents) }, 4702 { "vkCmdPipelineBarrier", reinterpret_cast<PFN_vkVoidFunction>(CmdPipelineBarrier) }, 4703 { "vkCmdBeginQuery", reinterpret_cast<PFN_vkVoidFunction>(CmdBeginQuery) }, 4704 { "vkCmdEndQuery", reinterpret_cast<PFN_vkVoidFunction>(CmdEndQuery) }, 4705 { "vkCmdResetQueryPool", reinterpret_cast<PFN_vkVoidFunction>(CmdResetQueryPool) }, 4706 { "vkCmdWriteTimestamp", reinterpret_cast<PFN_vkVoidFunction>(CmdWriteTimestamp) }, 4707 { "vkCmdCopyQueryPoolResults", reinterpret_cast<PFN_vkVoidFunction>(CmdCopyQueryPoolResults) }, 4708 { "vkCreateFramebuffer", reinterpret_cast<PFN_vkVoidFunction>(CreateFramebuffer) }, 4709 { "vkDestroyFramebuffer", 
reinterpret_cast<PFN_vkVoidFunction>(DestroyFramebuffer) }, 4710 { "vkCreateRenderPass", reinterpret_cast<PFN_vkVoidFunction>(CreateRenderPass) }, 4711 { "vkDestroyRenderPass", reinterpret_cast<PFN_vkVoidFunction>(DestroyRenderPass) }, 4712 { "vkGetRenderAreaGranularity", reinterpret_cast<PFN_vkVoidFunction>(GetRenderAreaGranularity) }, 4713 { "vkCreateCommandPool", reinterpret_cast<PFN_vkVoidFunction>(CreateCommandPool) }, 4714 { "vkDestroyCommandPool", reinterpret_cast<PFN_vkVoidFunction>(DestroyCommandPool) }, 4715 { "vkResetCommandPool", reinterpret_cast<PFN_vkVoidFunction>(ResetCommandPool) }, 4716 { "vkCmdBeginRenderPass", reinterpret_cast<PFN_vkVoidFunction>(CmdBeginRenderPass) }, 4717 { "vkCmdNextSubpass", reinterpret_cast<PFN_vkVoidFunction>(CmdNextSubpass) }, 4718 }; 4719 4720 for (size_t i = 0; i < ARRAY_SIZE(core_device_commands); i++) { 4721 if (!strcmp(core_device_commands[i].name, name)) 4722 return core_device_commands[i].proc; 4723 } 4724 4725 return nullptr; 4726} 4727 4728} // namespace parameter_validation 4729 4730// vk_layer_logging.h expects these to be defined 4731 4732VKAPI_ATTR VkResult VKAPI_CALL 4733vkCreateDebugReportCallbackEXT(VkInstance instance, const VkDebugReportCallbackCreateInfoEXT *pCreateInfo, 4734 const VkAllocationCallbacks *pAllocator, VkDebugReportCallbackEXT *pMsgCallback) { 4735 return parameter_validation::CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pMsgCallback); 4736} 4737 4738VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT(VkInstance instance, 4739 VkDebugReportCallbackEXT msgCallback, 4740 const VkAllocationCallbacks *pAllocator) { 4741 parameter_validation::DestroyDebugReportCallbackEXT(instance, msgCallback, pAllocator); 4742} 4743 4744VKAPI_ATTR void VKAPI_CALL 4745vkDebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objType, uint64_t object, 4746 size_t location, int32_t msgCode, const char *pLayerPrefix, const char *pMsg) { 4747 
parameter_validation::DebugReportMessageEXT(instance, flags, objType, object, location, msgCode, pLayerPrefix, pMsg); 4748} 4749 4750// loader-layer interface v0 4751 4752VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL 4753vkEnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount, VkExtensionProperties *pProperties) { 4754 return parameter_validation::EnumerateInstanceExtensionProperties(pLayerName, pCount, pProperties); 4755} 4756 4757VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL 4758vkEnumerateInstanceLayerProperties(uint32_t *pCount, VkLayerProperties *pProperties) { 4759 return parameter_validation::EnumerateInstanceLayerProperties(pCount, pProperties); 4760} 4761 4762VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL 4763vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount, VkLayerProperties *pProperties) { 4764 // the layer command handles VK_NULL_HANDLE just fine internally 4765 assert(physicalDevice == VK_NULL_HANDLE); 4766 return parameter_validation::EnumerateDeviceLayerProperties(VK_NULL_HANDLE, pCount, pProperties); 4767} 4768 4769VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, 4770 const char *pLayerName, uint32_t *pCount, 4771 VkExtensionProperties *pProperties) { 4772 // the layer command handles VK_NULL_HANDLE just fine internally 4773 assert(physicalDevice == VK_NULL_HANDLE); 4774 return parameter_validation::EnumerateDeviceExtensionProperties(VK_NULL_HANDLE, pLayerName, pCount, pProperties); 4775} 4776 4777VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice dev, const char *funcName) { 4778 return parameter_validation::GetDeviceProcAddr(dev, funcName); 4779} 4780 4781VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char *funcName) { 4782 return parameter_validation::GetInstanceProcAddr(instance, funcName); 4783} 4784