parameter_validation.cpp revision 93a6d51ebe5141fd3468aedf34436f2fc8b0984b
1/* Copyright (c) 2015-2016 The Khronos Group Inc. 2 * Copyright (c) 2015-2016 Valve Corporation 3 * Copyright (c) 2015-2016 LunarG, Inc. 4 * Copyright (C) 2015-2016 Google Inc. 5 * 6 * Licensed under the Apache License, Version 2.0 (the "License"); 7 * you may not use this file except in compliance with the License. 8 * You may obtain a copy of the License at 9 * 10 * http://www.apache.org/licenses/LICENSE-2.0 11 * 12 * Unless required by applicable law or agreed to in writing, software 13 * distributed under the License is distributed on an "AS IS" BASIS, 14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 * See the License for the specific language governing permissions and 16 * limitations under the License. 17 * 18 * Author: Jeremy Hayes <jeremy@lunarg.com> 19 * Author: Tony Barbour <tony@LunarG.com> 20 * Author: Mark Lobodzinski <mark@LunarG.com> 21 * Author: Dustin Graves <dustin@lunarg.com> 22 */ 23 24#define NOMINMAX 25 26#include <math.h> 27#include <stdio.h> 28#include <stdlib.h> 29#include <string.h> 30 31#include <iostream> 32#include <string> 33#include <sstream> 34#include <unordered_map> 35#include <unordered_set> 36#include <vector> 37 38#include "vk_loader_platform.h" 39#include "vulkan/vk_layer.h" 40#include "vk_layer_config.h" 41#include "vk_enum_validate_helper.h" 42#include "vk_struct_validate_helper.h" 43 44#include "vk_layer_table.h" 45#include "vk_layer_data.h" 46#include "vk_layer_logging.h" 47#include "vk_layer_extension_utils.h" 48#include "vk_layer_utils.h" 49 50#include "parameter_validation.h" 51 52namespace parameter_validation { 53 54struct layer_data { 55 VkInstance instance; 56 57 debug_report_data *report_data; 58 std::vector<VkDebugReportCallbackEXT> logging_callback; 59 60 // The following are for keeping track of the temporary callbacks that can 61 // be used in vkCreateInstance and vkDestroyInstance: 62 uint32_t num_tmp_callbacks; 63 VkDebugReportCallbackCreateInfoEXT *tmp_dbg_create_infos; 64 
VkDebugReportCallbackEXT *tmp_callbacks; 65 66 // TODO: Split instance/device structs 67 // Device Data 68 // Map for queue family index to queue count 69 std::unordered_map<uint32_t, uint32_t> queueFamilyIndexMap; 70 VkPhysicalDeviceLimits device_limits; 71 VkPhysicalDeviceFeatures physical_device_features; 72 VkPhysicalDevice physical_device; 73 74 bool wsi_enabled; 75 76 layer_data() 77 : report_data(nullptr), num_tmp_callbacks(0), tmp_dbg_create_infos(nullptr), tmp_callbacks(nullptr), device_limits{}, 78 physical_device_features{}, physical_device{}, wsi_enabled(false){}; 79}; 80 81static std::unordered_map<void *, struct instance_extension_enables> instance_extension_map; 82static std::unordered_map<void *, layer_data *> layer_data_map; 83static device_table_map pc_device_table_map; 84static instance_table_map pc_instance_table_map; 85 86// "my instance data" 87debug_report_data *mid(VkInstance object) { 88 dispatch_key key = get_dispatch_key(object); 89 layer_data *data = get_my_data_ptr(key, layer_data_map); 90#if DISPATCH_MAP_DEBUG 91 fprintf(stderr, "MID: map: 0x%p, object: 0x%p, key: 0x%p, data: 0x%p\n", &layer_data_map, object, key, data); 92#endif 93 assert(data != NULL); 94 95 return data->report_data; 96} 97 98// "my device data" 99debug_report_data *mdd(void *object) { 100 dispatch_key key = get_dispatch_key(object); 101 layer_data *data = get_my_data_ptr(key, layer_data_map); 102#if DISPATCH_MAP_DEBUG 103 fprintf(stderr, "MDD: map: 0x%p, object: 0x%p, key: 0x%p, data: 0x%p\n", &layer_data_map, object, key, data); 104#endif 105 assert(data != NULL); 106 return data->report_data; 107} 108 109static void init_parameter_validation(layer_data *my_data, const VkAllocationCallbacks *pAllocator) { 110 111 layer_debug_actions(my_data->report_data, my_data->logging_callback, pAllocator, "lunarg_parameter_validation"); 112} 113 114VKAPI_ATTR VkResult VKAPI_CALL CreateDebugReportCallbackEXT(VkInstance instance, 115 const VkDebugReportCallbackCreateInfoEXT 
*pCreateInfo, 116 const VkAllocationCallbacks *pAllocator, 117 VkDebugReportCallbackEXT *pMsgCallback) { 118 VkLayerInstanceDispatchTable *pTable = get_dispatch_table(pc_instance_table_map, instance); 119 VkResult result = pTable->CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pMsgCallback); 120 121 if (result == VK_SUCCESS) { 122 layer_data *data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map); 123 result = layer_create_msg_callback(data->report_data, false, pCreateInfo, pAllocator, pMsgCallback); 124 } 125 126 return result; 127} 128 129VKAPI_ATTR void VKAPI_CALL DestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT msgCallback, 130 const VkAllocationCallbacks *pAllocator) { 131 VkLayerInstanceDispatchTable *pTable = get_dispatch_table(pc_instance_table_map, instance); 132 pTable->DestroyDebugReportCallbackEXT(instance, msgCallback, pAllocator); 133 134 layer_data *data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map); 135 layer_destroy_msg_callback(data->report_data, msgCallback, pAllocator); 136} 137 138VKAPI_ATTR void VKAPI_CALL DebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags, 139 VkDebugReportObjectTypeEXT objType, uint64_t object, size_t location, 140 int32_t msgCode, const char *pLayerPrefix, const char *pMsg) { 141 VkLayerInstanceDispatchTable *pTable = get_dispatch_table(pc_instance_table_map, instance); 142 pTable->DebugReportMessageEXT(instance, flags, objType, object, location, msgCode, pLayerPrefix, pMsg); 143} 144 145static const VkExtensionProperties instance_extensions[] = {{VK_EXT_DEBUG_REPORT_EXTENSION_NAME, VK_EXT_DEBUG_REPORT_SPEC_VERSION}}; 146 147static const VkLayerProperties global_layer = { 148 "VK_LAYER_LUNARG_parameter_validation", VK_LAYER_API_VERSION, 1, "LunarG Validation Layer", 149}; 150 151static bool ValidateEnumerator(VkFormatFeatureFlagBits const &enumerator) { 152 VkFormatFeatureFlagBits allFlags = (VkFormatFeatureFlagBits)( 153 
VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT | VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT | 154 VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT | VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT | 155 VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT | VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT | VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT | 156 VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT | VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT | 157 VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_BLIT_DST_BIT | 158 VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT); 159 if (enumerator & (~allFlags)) { 160 return false; 161 } 162 163 return true; 164} 165 166static std::string EnumeratorString(VkFormatFeatureFlagBits const &enumerator) { 167 if (!ValidateEnumerator(enumerator)) { 168 return "unrecognized enumerator"; 169 } 170 171 std::vector<std::string> strings; 172 if (enumerator & VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT) { 173 strings.push_back("VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT"); 174 } 175 if (enumerator & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT) { 176 strings.push_back("VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT"); 177 } 178 if (enumerator & VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT) { 179 strings.push_back("VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT"); 180 } 181 if (enumerator & VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT) { 182 strings.push_back("VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT"); 183 } 184 if (enumerator & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) { 185 strings.push_back("VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT"); 186 } 187 if (enumerator & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT) { 188 strings.push_back("VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT"); 189 } 190 if (enumerator & VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT) { 191 strings.push_back("VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT"); 192 } 193 if (enumerator & VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT) { 194 strings.push_back("VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT"); 195 } 196 if 
(enumerator & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) { 197 strings.push_back("VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT"); 198 } 199 if (enumerator & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) { 200 strings.push_back("VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT"); 201 } 202 if (enumerator & VK_FORMAT_FEATURE_BLIT_SRC_BIT) { 203 strings.push_back("VK_FORMAT_FEATURE_BLIT_SRC_BIT"); 204 } 205 if (enumerator & VK_FORMAT_FEATURE_BLIT_DST_BIT) { 206 strings.push_back("VK_FORMAT_FEATURE_BLIT_DST_BIT"); 207 } 208 if (enumerator & VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT) { 209 strings.push_back("VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT"); 210 } 211 212 std::string enumeratorString; 213 for (auto const &string : strings) { 214 enumeratorString += string; 215 216 if (string != strings.back()) { 217 enumeratorString += '|'; 218 } 219 } 220 221 return enumeratorString; 222} 223 224static bool ValidateEnumerator(VkImageUsageFlagBits const &enumerator) { 225 VkImageUsageFlagBits allFlags = (VkImageUsageFlagBits)( 226 VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | 227 VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | 228 VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT); 229 if (enumerator & (~allFlags)) { 230 return false; 231 } 232 233 return true; 234} 235 236static std::string EnumeratorString(VkImageUsageFlagBits const &enumerator) { 237 if (!ValidateEnumerator(enumerator)) { 238 return "unrecognized enumerator"; 239 } 240 241 std::vector<std::string> strings; 242 if (enumerator & VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT) { 243 strings.push_back("VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT"); 244 } 245 if (enumerator & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) { 246 strings.push_back("VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT"); 247 } 248 if (enumerator & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) { 249 
strings.push_back("VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT"); 250 } 251 if (enumerator & VK_IMAGE_USAGE_STORAGE_BIT) { 252 strings.push_back("VK_IMAGE_USAGE_STORAGE_BIT"); 253 } 254 if (enumerator & VK_IMAGE_USAGE_SAMPLED_BIT) { 255 strings.push_back("VK_IMAGE_USAGE_SAMPLED_BIT"); 256 } 257 if (enumerator & VK_IMAGE_USAGE_TRANSFER_DST_BIT) { 258 strings.push_back("VK_IMAGE_USAGE_TRANSFER_DST_BIT"); 259 } 260 if (enumerator & VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT) { 261 strings.push_back("VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT"); 262 } 263 if (enumerator & VK_IMAGE_USAGE_TRANSFER_SRC_BIT) { 264 strings.push_back("VK_IMAGE_USAGE_TRANSFER_SRC_BIT"); 265 } 266 267 std::string enumeratorString; 268 for (auto const &string : strings) { 269 enumeratorString += string; 270 271 if (string != strings.back()) { 272 enumeratorString += '|'; 273 } 274 } 275 276 return enumeratorString; 277} 278 279static bool ValidateEnumerator(VkQueueFlagBits const &enumerator) { 280 VkQueueFlagBits allFlags = 281 (VkQueueFlagBits)(VK_QUEUE_TRANSFER_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_SPARSE_BINDING_BIT | VK_QUEUE_GRAPHICS_BIT); 282 if (enumerator & (~allFlags)) { 283 return false; 284 } 285 286 return true; 287} 288 289static std::string EnumeratorString(VkQueueFlagBits const &enumerator) { 290 if (!ValidateEnumerator(enumerator)) { 291 return "unrecognized enumerator"; 292 } 293 294 std::vector<std::string> strings; 295 if (enumerator & VK_QUEUE_TRANSFER_BIT) { 296 strings.push_back("VK_QUEUE_TRANSFER_BIT"); 297 } 298 if (enumerator & VK_QUEUE_COMPUTE_BIT) { 299 strings.push_back("VK_QUEUE_COMPUTE_BIT"); 300 } 301 if (enumerator & VK_QUEUE_SPARSE_BINDING_BIT) { 302 strings.push_back("VK_QUEUE_SPARSE_BINDING_BIT"); 303 } 304 if (enumerator & VK_QUEUE_GRAPHICS_BIT) { 305 strings.push_back("VK_QUEUE_GRAPHICS_BIT"); 306 } 307 308 std::string enumeratorString; 309 for (auto const &string : strings) { 310 enumeratorString += string; 311 312 if (string != strings.back()) { 313 enumeratorString += 
'|'; 314 } 315 } 316 317 return enumeratorString; 318} 319 320static bool ValidateEnumerator(VkMemoryPropertyFlagBits const &enumerator) { 321 VkMemoryPropertyFlagBits allFlags = (VkMemoryPropertyFlagBits)( 322 VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | 323 VK_MEMORY_PROPERTY_HOST_CACHED_BIT | VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT); 324 if (enumerator & (~allFlags)) { 325 return false; 326 } 327 328 return true; 329} 330 331static std::string EnumeratorString(VkMemoryPropertyFlagBits const &enumerator) { 332 if (!ValidateEnumerator(enumerator)) { 333 return "unrecognized enumerator"; 334 } 335 336 std::vector<std::string> strings; 337 if (enumerator & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) { 338 strings.push_back("VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT"); 339 } 340 if (enumerator & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) { 341 strings.push_back("VK_MEMORY_PROPERTY_HOST_COHERENT_BIT"); 342 } 343 if (enumerator & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) { 344 strings.push_back("VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT"); 345 } 346 if (enumerator & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) { 347 strings.push_back("VK_MEMORY_PROPERTY_HOST_CACHED_BIT"); 348 } 349 if (enumerator & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) { 350 strings.push_back("VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT"); 351 } 352 353 std::string enumeratorString; 354 for (auto const &string : strings) { 355 enumeratorString += string; 356 357 if (string != strings.back()) { 358 enumeratorString += '|'; 359 } 360 } 361 362 return enumeratorString; 363} 364 365static bool ValidateEnumerator(VkMemoryHeapFlagBits const &enumerator) { 366 VkMemoryHeapFlagBits allFlags = (VkMemoryHeapFlagBits)(VK_MEMORY_HEAP_DEVICE_LOCAL_BIT); 367 if (enumerator & (~allFlags)) { 368 return false; 369 } 370 371 return true; 372} 373 374static std::string EnumeratorString(VkMemoryHeapFlagBits const &enumerator) { 375 if (!ValidateEnumerator(enumerator)) { 376 return 
"unrecognized enumerator"; 377 } 378 379 std::vector<std::string> strings; 380 if (enumerator & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) { 381 strings.push_back("VK_MEMORY_HEAP_DEVICE_LOCAL_BIT"); 382 } 383 384 std::string enumeratorString; 385 for (auto const &string : strings) { 386 enumeratorString += string; 387 388 if (string != strings.back()) { 389 enumeratorString += '|'; 390 } 391 } 392 393 return enumeratorString; 394} 395 396static bool ValidateEnumerator(VkSparseImageFormatFlagBits const &enumerator) { 397 VkSparseImageFormatFlagBits allFlags = 398 (VkSparseImageFormatFlagBits)(VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT | 399 VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT | VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT); 400 if (enumerator & (~allFlags)) { 401 return false; 402 } 403 404 return true; 405} 406 407static std::string EnumeratorString(VkSparseImageFormatFlagBits const &enumerator) { 408 if (!ValidateEnumerator(enumerator)) { 409 return "unrecognized enumerator"; 410 } 411 412 std::vector<std::string> strings; 413 if (enumerator & VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT) { 414 strings.push_back("VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT"); 415 } 416 if (enumerator & VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT) { 417 strings.push_back("VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT"); 418 } 419 if (enumerator & VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT) { 420 strings.push_back("VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT"); 421 } 422 423 std::string enumeratorString; 424 for (auto const &string : strings) { 425 enumeratorString += string; 426 427 if (string != strings.back()) { 428 enumeratorString += '|'; 429 } 430 } 431 432 return enumeratorString; 433} 434 435static bool ValidateEnumerator(VkFenceCreateFlagBits const &enumerator) { 436 VkFenceCreateFlagBits allFlags = (VkFenceCreateFlagBits)(VK_FENCE_CREATE_SIGNALED_BIT); 437 if (enumerator & (~allFlags)) { 438 return false; 439 } 440 441 return true; 442} 443 444static std::string 
EnumeratorString(VkFenceCreateFlagBits const &enumerator) { 445 if (!ValidateEnumerator(enumerator)) { 446 return "unrecognized enumerator"; 447 } 448 449 std::vector<std::string> strings; 450 if (enumerator & VK_FENCE_CREATE_SIGNALED_BIT) { 451 strings.push_back("VK_FENCE_CREATE_SIGNALED_BIT"); 452 } 453 454 std::string enumeratorString; 455 for (auto const &string : strings) { 456 enumeratorString += string; 457 458 if (string != strings.back()) { 459 enumeratorString += '|'; 460 } 461 } 462 463 return enumeratorString; 464} 465 466static bool ValidateEnumerator(VkQueryPipelineStatisticFlagBits const &enumerator) { 467 VkQueryPipelineStatisticFlagBits allFlags = (VkQueryPipelineStatisticFlagBits)( 468 VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT | VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT | 469 VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT | VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT | 470 VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT | VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT | 471 VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT | VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT | 472 VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT | 473 VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT | 474 VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT); 475 if (enumerator & (~allFlags)) { 476 return false; 477 } 478 479 return true; 480} 481 482static std::string EnumeratorString(VkQueryPipelineStatisticFlagBits const &enumerator) { 483 if (!ValidateEnumerator(enumerator)) { 484 return "unrecognized enumerator"; 485 } 486 487 std::vector<std::string> strings; 488 if (enumerator & VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT) { 489 strings.push_back("VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT"); 490 } 491 if (enumerator & VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT) { 492 
strings.push_back("VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT"); 493 } 494 if (enumerator & VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT) { 495 strings.push_back("VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT"); 496 } 497 if (enumerator & VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT) { 498 strings.push_back("VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT"); 499 } 500 if (enumerator & VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT) { 501 strings.push_back("VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT"); 502 } 503 if (enumerator & VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT) { 504 strings.push_back("VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT"); 505 } 506 if (enumerator & VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT) { 507 strings.push_back("VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT"); 508 } 509 if (enumerator & VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT) { 510 strings.push_back("VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT"); 511 } 512 if (enumerator & VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT) { 513 strings.push_back("VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT"); 514 } 515 if (enumerator & VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT) { 516 strings.push_back("VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT"); 517 } 518 if (enumerator & VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT) { 519 strings.push_back("VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT"); 520 } 521 522 std::string enumeratorString; 523 for (auto const &string : strings) { 524 enumeratorString += string; 525 526 if (string != strings.back()) { 527 enumeratorString += '|'; 528 } 529 } 530 531 return enumeratorString; 532} 533 534static bool ValidateEnumerator(VkQueryResultFlagBits const &enumerator) { 535 
VkQueryResultFlagBits allFlags = (VkQueryResultFlagBits)(VK_QUERY_RESULT_PARTIAL_BIT | VK_QUERY_RESULT_WITH_AVAILABILITY_BIT | 536 VK_QUERY_RESULT_WAIT_BIT | VK_QUERY_RESULT_64_BIT); 537 if (enumerator & (~allFlags)) { 538 return false; 539 } 540 541 return true; 542} 543 544static std::string EnumeratorString(VkQueryResultFlagBits const &enumerator) { 545 if (!ValidateEnumerator(enumerator)) { 546 return "unrecognized enumerator"; 547 } 548 549 std::vector<std::string> strings; 550 if (enumerator & VK_QUERY_RESULT_PARTIAL_BIT) { 551 strings.push_back("VK_QUERY_RESULT_PARTIAL_BIT"); 552 } 553 if (enumerator & VK_QUERY_RESULT_WITH_AVAILABILITY_BIT) { 554 strings.push_back("VK_QUERY_RESULT_WITH_AVAILABILITY_BIT"); 555 } 556 if (enumerator & VK_QUERY_RESULT_WAIT_BIT) { 557 strings.push_back("VK_QUERY_RESULT_WAIT_BIT"); 558 } 559 if (enumerator & VK_QUERY_RESULT_64_BIT) { 560 strings.push_back("VK_QUERY_RESULT_64_BIT"); 561 } 562 563 std::string enumeratorString; 564 for (auto const &string : strings) { 565 enumeratorString += string; 566 567 if (string != strings.back()) { 568 enumeratorString += '|'; 569 } 570 } 571 572 return enumeratorString; 573} 574 575static bool ValidateEnumerator(VkBufferUsageFlagBits const &enumerator) { 576 VkBufferUsageFlagBits allFlags = (VkBufferUsageFlagBits)( 577 VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT | 578 VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | 579 VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT); 580 if (enumerator & (~allFlags)) { 581 return false; 582 } 583 584 return true; 585} 586 587static std::string EnumeratorString(VkBufferUsageFlagBits const &enumerator) { 588 if (!ValidateEnumerator(enumerator)) { 589 return "unrecognized enumerator"; 590 } 591 592 std::vector<std::string> strings; 593 if (enumerator & 
VK_BUFFER_USAGE_VERTEX_BUFFER_BIT) { 594 strings.push_back("VK_BUFFER_USAGE_VERTEX_BUFFER_BIT"); 595 } 596 if (enumerator & VK_BUFFER_USAGE_INDEX_BUFFER_BIT) { 597 strings.push_back("VK_BUFFER_USAGE_INDEX_BUFFER_BIT"); 598 } 599 if (enumerator & VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT) { 600 strings.push_back("VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT"); 601 } 602 if (enumerator & VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT) { 603 strings.push_back("VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT"); 604 } 605 if (enumerator & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) { 606 strings.push_back("VK_BUFFER_USAGE_STORAGE_BUFFER_BIT"); 607 } 608 if (enumerator & VK_BUFFER_USAGE_TRANSFER_DST_BIT) { 609 strings.push_back("VK_BUFFER_USAGE_TRANSFER_DST_BIT"); 610 } 611 if (enumerator & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) { 612 strings.push_back("VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT"); 613 } 614 if (enumerator & VK_BUFFER_USAGE_TRANSFER_SRC_BIT) { 615 strings.push_back("VK_BUFFER_USAGE_TRANSFER_SRC_BIT"); 616 } 617 if (enumerator & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) { 618 strings.push_back("VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT"); 619 } 620 621 std::string enumeratorString; 622 for (auto const &string : strings) { 623 enumeratorString += string; 624 625 if (string != strings.back()) { 626 enumeratorString += '|'; 627 } 628 } 629 630 return enumeratorString; 631} 632 633static bool ValidateEnumerator(VkBufferCreateFlagBits const &enumerator) { 634 VkBufferCreateFlagBits allFlags = (VkBufferCreateFlagBits)( 635 VK_BUFFER_CREATE_SPARSE_ALIASED_BIT | VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT | VK_BUFFER_CREATE_SPARSE_BINDING_BIT); 636 if (enumerator & (~allFlags)) { 637 return false; 638 } 639 640 return true; 641} 642 643static std::string EnumeratorString(VkBufferCreateFlagBits const &enumerator) { 644 if (!ValidateEnumerator(enumerator)) { 645 return "unrecognized enumerator"; 646 } 647 648 std::vector<std::string> strings; 649 if (enumerator & VK_BUFFER_CREATE_SPARSE_ALIASED_BIT) { 650 
strings.push_back("VK_BUFFER_CREATE_SPARSE_ALIASED_BIT"); 651 } 652 if (enumerator & VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT) { 653 strings.push_back("VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT"); 654 } 655 if (enumerator & VK_BUFFER_CREATE_SPARSE_BINDING_BIT) { 656 strings.push_back("VK_BUFFER_CREATE_SPARSE_BINDING_BIT"); 657 } 658 659 std::string enumeratorString; 660 for (auto const &string : strings) { 661 enumeratorString += string; 662 663 if (string != strings.back()) { 664 enumeratorString += '|'; 665 } 666 } 667 668 return enumeratorString; 669} 670 671static bool ValidateEnumerator(VkImageCreateFlagBits const &enumerator) { 672 VkImageCreateFlagBits allFlags = (VkImageCreateFlagBits)( 673 VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT | VK_IMAGE_CREATE_SPARSE_ALIASED_BIT | VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT | 674 VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_SPARSE_BINDING_BIT); 675 if (enumerator & (~allFlags)) { 676 return false; 677 } 678 679 return true; 680} 681 682static std::string EnumeratorString(VkImageCreateFlagBits const &enumerator) { 683 if (!ValidateEnumerator(enumerator)) { 684 return "unrecognized enumerator"; 685 } 686 687 std::vector<std::string> strings; 688 if (enumerator & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT) { 689 strings.push_back("VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT"); 690 } 691 if (enumerator & VK_IMAGE_CREATE_SPARSE_ALIASED_BIT) { 692 strings.push_back("VK_IMAGE_CREATE_SPARSE_ALIASED_BIT"); 693 } 694 if (enumerator & VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT) { 695 strings.push_back("VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT"); 696 } 697 if (enumerator & VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT) { 698 strings.push_back("VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT"); 699 } 700 if (enumerator & VK_IMAGE_CREATE_SPARSE_BINDING_BIT) { 701 strings.push_back("VK_IMAGE_CREATE_SPARSE_BINDING_BIT"); 702 } 703 704 std::string enumeratorString; 705 for (auto const &string : strings) { 706 enumeratorString += string; 707 708 if (string != strings.back()) { 709 
enumeratorString += '|'; 710 } 711 } 712 713 return enumeratorString; 714} 715 716static bool ValidateEnumerator(VkColorComponentFlagBits const &enumerator) { 717 VkColorComponentFlagBits allFlags = (VkColorComponentFlagBits)(VK_COLOR_COMPONENT_A_BIT | VK_COLOR_COMPONENT_B_BIT | 718 VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_R_BIT); 719 if (enumerator & (~allFlags)) { 720 return false; 721 } 722 723 return true; 724} 725 726static std::string EnumeratorString(VkColorComponentFlagBits const &enumerator) { 727 if (!ValidateEnumerator(enumerator)) { 728 return "unrecognized enumerator"; 729 } 730 731 std::vector<std::string> strings; 732 if (enumerator & VK_COLOR_COMPONENT_A_BIT) { 733 strings.push_back("VK_COLOR_COMPONENT_A_BIT"); 734 } 735 if (enumerator & VK_COLOR_COMPONENT_B_BIT) { 736 strings.push_back("VK_COLOR_COMPONENT_B_BIT"); 737 } 738 if (enumerator & VK_COLOR_COMPONENT_G_BIT) { 739 strings.push_back("VK_COLOR_COMPONENT_G_BIT"); 740 } 741 if (enumerator & VK_COLOR_COMPONENT_R_BIT) { 742 strings.push_back("VK_COLOR_COMPONENT_R_BIT"); 743 } 744 745 std::string enumeratorString; 746 for (auto const &string : strings) { 747 enumeratorString += string; 748 749 if (string != strings.back()) { 750 enumeratorString += '|'; 751 } 752 } 753 754 return enumeratorString; 755} 756 757static bool ValidateEnumerator(VkPipelineCreateFlagBits const &enumerator) { 758 VkPipelineCreateFlagBits allFlags = (VkPipelineCreateFlagBits)( 759 VK_PIPELINE_CREATE_DERIVATIVE_BIT | VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT | VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT); 760 if (enumerator & (~allFlags)) { 761 return false; 762 } 763 764 return true; 765} 766 767static std::string EnumeratorString(VkPipelineCreateFlagBits const &enumerator) { 768 if (!ValidateEnumerator(enumerator)) { 769 return "unrecognized enumerator"; 770 } 771 772 std::vector<std::string> strings; 773 if (enumerator & VK_PIPELINE_CREATE_DERIVATIVE_BIT) { 774 strings.push_back("VK_PIPELINE_CREATE_DERIVATIVE_BIT"); 
775 } 776 if (enumerator & VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT) { 777 strings.push_back("VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT"); 778 } 779 if (enumerator & VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT) { 780 strings.push_back("VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT"); 781 } 782 783 std::string enumeratorString; 784 for (auto const &string : strings) { 785 enumeratorString += string; 786 787 if (string != strings.back()) { 788 enumeratorString += '|'; 789 } 790 } 791 792 return enumeratorString; 793} 794 795static bool ValidateEnumerator(VkShaderStageFlagBits const &enumerator) { 796 VkShaderStageFlagBits allFlags = (VkShaderStageFlagBits)( 797 VK_SHADER_STAGE_ALL | VK_SHADER_STAGE_FRAGMENT_BIT | VK_SHADER_STAGE_GEOMETRY_BIT | VK_SHADER_STAGE_COMPUTE_BIT | 798 VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT | VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT | VK_SHADER_STAGE_VERTEX_BIT); 799 if (enumerator & (~allFlags)) { 800 return false; 801 } 802 803 return true; 804} 805 806static std::string EnumeratorString(VkShaderStageFlagBits const &enumerator) { 807 if (!ValidateEnumerator(enumerator)) { 808 return "unrecognized enumerator"; 809 } 810 811 std::vector<std::string> strings; 812 if (enumerator & VK_SHADER_STAGE_ALL) { 813 strings.push_back("VK_SHADER_STAGE_ALL"); 814 } 815 if (enumerator & VK_SHADER_STAGE_FRAGMENT_BIT) { 816 strings.push_back("VK_SHADER_STAGE_FRAGMENT_BIT"); 817 } 818 if (enumerator & VK_SHADER_STAGE_GEOMETRY_BIT) { 819 strings.push_back("VK_SHADER_STAGE_GEOMETRY_BIT"); 820 } 821 if (enumerator & VK_SHADER_STAGE_COMPUTE_BIT) { 822 strings.push_back("VK_SHADER_STAGE_COMPUTE_BIT"); 823 } 824 if (enumerator & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) { 825 strings.push_back("VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT"); 826 } 827 if (enumerator & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT) { 828 strings.push_back("VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT"); 829 } 830 if (enumerator & VK_SHADER_STAGE_VERTEX_BIT) { 831 
strings.push_back("VK_SHADER_STAGE_VERTEX_BIT"); 832 } 833 834 std::string enumeratorString; 835 for (auto const &string : strings) { 836 enumeratorString += string; 837 838 if (string != strings.back()) { 839 enumeratorString += '|'; 840 } 841 } 842 843 return enumeratorString; 844} 845 846static bool ValidateEnumerator(VkPipelineStageFlagBits const &enumerator) { 847 VkPipelineStageFlagBits allFlags = (VkPipelineStageFlagBits)( 848 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT | VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT | VK_PIPELINE_STAGE_HOST_BIT | 849 VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT | VK_PIPELINE_STAGE_TRANSFER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | 850 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | 851 VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | 852 VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT | VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT | 853 VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT | 854 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT); 855 if (enumerator & (~allFlags)) { 856 return false; 857 } 858 859 return true; 860} 861 862static std::string EnumeratorString(VkPipelineStageFlagBits const &enumerator) { 863 if (!ValidateEnumerator(enumerator)) { 864 return "unrecognized enumerator"; 865 } 866 867 std::vector<std::string> strings; 868 if (enumerator & VK_PIPELINE_STAGE_ALL_COMMANDS_BIT) { 869 strings.push_back("VK_PIPELINE_STAGE_ALL_COMMANDS_BIT"); 870 } 871 if (enumerator & VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT) { 872 strings.push_back("VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT"); 873 } 874 if (enumerator & VK_PIPELINE_STAGE_HOST_BIT) { 875 strings.push_back("VK_PIPELINE_STAGE_HOST_BIT"); 876 } 877 if (enumerator & VK_PIPELINE_STAGE_TRANSFER_BIT) { 878 strings.push_back("VK_PIPELINE_STAGE_TRANSFER_BIT"); 879 } 880 if (enumerator & VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT) 
{ 881 strings.push_back("VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT"); 882 } 883 if (enumerator & VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT) { 884 strings.push_back("VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT"); 885 } 886 if (enumerator & VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT) { 887 strings.push_back("VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT"); 888 } 889 if (enumerator & VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT) { 890 strings.push_back("VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT"); 891 } 892 if (enumerator & VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT) { 893 strings.push_back("VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT"); 894 } 895 if (enumerator & VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT) { 896 strings.push_back("VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT"); 897 } 898 if (enumerator & VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT) { 899 strings.push_back("VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT"); 900 } 901 if (enumerator & VK_PIPELINE_STAGE_VERTEX_SHADER_BIT) { 902 strings.push_back("VK_PIPELINE_STAGE_VERTEX_SHADER_BIT"); 903 } 904 if (enumerator & VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT) { 905 strings.push_back("VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT"); 906 } 907 if (enumerator & VK_PIPELINE_STAGE_VERTEX_INPUT_BIT) { 908 strings.push_back("VK_PIPELINE_STAGE_VERTEX_INPUT_BIT"); 909 } 910 if (enumerator & VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT) { 911 strings.push_back("VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT"); 912 } 913 if (enumerator & VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT) { 914 strings.push_back("VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT"); 915 } 916 if (enumerator & VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT) { 917 strings.push_back("VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT"); 918 } 919 920 std::string enumeratorString; 921 for (auto const &string : strings) { 922 enumeratorString += string; 923 924 if (string != strings.back()) { 925 enumeratorString += '|'; 926 } 927 } 928 929 return enumeratorString; 930} 931 932static bool ValidateEnumerator(VkAccessFlagBits 
const &enumerator) {
    VkAccessFlagBits allFlags = (VkAccessFlagBits)(
        VK_ACCESS_INDIRECT_COMMAND_READ_BIT | VK_ACCESS_INDEX_READ_BIT | VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT |
        VK_ACCESS_UNIFORM_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT |
        VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
        VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | VK_ACCESS_TRANSFER_READ_BIT | VK_ACCESS_TRANSFER_WRITE_BIT |
        VK_ACCESS_HOST_READ_BIT | VK_ACCESS_HOST_WRITE_BIT | VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT);

    // Any bit outside the known mask makes the whole value invalid.
    if (enumerator & (~allFlags)) {
        return false;
    }

    return true;
}

// Builds a '|'-separated string naming every VkAccessFlagBits bit set in
// 'enumerator'; returns "unrecognized enumerator" if any unknown bit is set.
static std::string EnumeratorString(VkAccessFlagBits const &enumerator) {
    if (!ValidateEnumerator(enumerator)) {
        return "unrecognized enumerator";
    }

    std::vector<std::string> strings;
    if (enumerator & VK_ACCESS_INDIRECT_COMMAND_READ_BIT) {
        strings.push_back("VK_ACCESS_INDIRECT_COMMAND_READ_BIT");
    }
    if (enumerator & VK_ACCESS_INDEX_READ_BIT) {
        strings.push_back("VK_ACCESS_INDEX_READ_BIT");
    }
    if (enumerator & VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT) {
        strings.push_back("VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT");
    }
    if (enumerator & VK_ACCESS_UNIFORM_READ_BIT) {
        strings.push_back("VK_ACCESS_UNIFORM_READ_BIT");
    }
    if (enumerator & VK_ACCESS_INPUT_ATTACHMENT_READ_BIT) {
        strings.push_back("VK_ACCESS_INPUT_ATTACHMENT_READ_BIT");
    }
    if (enumerator & VK_ACCESS_SHADER_READ_BIT) {
        strings.push_back("VK_ACCESS_SHADER_READ_BIT");
    }
    if (enumerator & VK_ACCESS_SHADER_WRITE_BIT) {
        strings.push_back("VK_ACCESS_SHADER_WRITE_BIT");
    }
    if (enumerator & VK_ACCESS_COLOR_ATTACHMENT_READ_BIT) {
        strings.push_back("VK_ACCESS_COLOR_ATTACHMENT_READ_BIT");
    }
    if (enumerator & VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT) {
        strings.push_back("VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT");
    }
    if (enumerator & VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT) {
        strings.push_back("VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT");
    }
    if (enumerator & VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT) {
        strings.push_back("VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT");
    }
    if (enumerator & VK_ACCESS_TRANSFER_READ_BIT) {
        strings.push_back("VK_ACCESS_TRANSFER_READ_BIT");
    }
    if (enumerator & VK_ACCESS_TRANSFER_WRITE_BIT) {
        strings.push_back("VK_ACCESS_TRANSFER_WRITE_BIT");
    }
    if (enumerator & VK_ACCESS_HOST_READ_BIT) {
        strings.push_back("VK_ACCESS_HOST_READ_BIT");
    }
    if (enumerator & VK_ACCESS_HOST_WRITE_BIT) {
        strings.push_back("VK_ACCESS_HOST_WRITE_BIT");
    }
    if (enumerator & VK_ACCESS_MEMORY_READ_BIT) {
        strings.push_back("VK_ACCESS_MEMORY_READ_BIT");
    }
    if (enumerator & VK_ACCESS_MEMORY_WRITE_BIT) {
        strings.push_back("VK_ACCESS_MEMORY_WRITE_BIT");
    }

    // Join with '|' separators.
    std::string enumeratorString;
    for (auto const &string : strings) {
        enumeratorString += string;

        if (string != strings.back()) {
            enumeratorString += '|';
        }
    }

    return enumeratorString;
}

// Returns true iff 'enumerator' contains only known VkCommandPoolCreateFlagBits values.
static bool ValidateEnumerator(VkCommandPoolCreateFlagBits const &enumerator) {
    VkCommandPoolCreateFlagBits allFlags =
        (VkCommandPoolCreateFlagBits)(VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT | VK_COMMAND_POOL_CREATE_TRANSIENT_BIT);
    if (enumerator & (~allFlags)) {
        return false;
    }

    return true;
}

// Builds a '|'-separated string naming every VkCommandPoolCreateFlagBits bit set.
static std::string EnumeratorString(VkCommandPoolCreateFlagBits const &enumerator) {
    if (!ValidateEnumerator(enumerator)) {
        return "unrecognized enumerator";
    }

    std::vector<std::string> strings;
    if (enumerator & VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT) {
        strings.push_back("VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT");
    }
    if
(enumerator & VK_COMMAND_POOL_CREATE_TRANSIENT_BIT) {
        strings.push_back("VK_COMMAND_POOL_CREATE_TRANSIENT_BIT");
    }

    // Join with '|' separators.
    std::string enumeratorString;
    for (auto const &string : strings) {
        enumeratorString += string;

        if (string != strings.back()) {
            enumeratorString += '|';
        }
    }

    return enumeratorString;
}

// Returns true iff 'enumerator' contains only known VkCommandPoolResetFlagBits values.
static bool ValidateEnumerator(VkCommandPoolResetFlagBits const &enumerator) {
    VkCommandPoolResetFlagBits allFlags = (VkCommandPoolResetFlagBits)(VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT);
    if (enumerator & (~allFlags)) {
        return false;
    }

    return true;
}

// Builds a '|'-separated string naming every VkCommandPoolResetFlagBits bit set.
static std::string EnumeratorString(VkCommandPoolResetFlagBits const &enumerator) {
    if (!ValidateEnumerator(enumerator)) {
        return "unrecognized enumerator";
    }

    std::vector<std::string> strings;
    if (enumerator & VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT) {
        strings.push_back("VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT");
    }

    // Join with '|' separators (at most one name here, so no separator is emitted).
    std::string enumeratorString;
    for (auto const &string : strings) {
        enumeratorString += string;

        if (string != strings.back()) {
            enumeratorString += '|';
        }
    }

    return enumeratorString;
}

// Returns true iff 'enumerator' contains only known VkCommandBufferUsageFlags values.
// NOTE: this overload takes the Flags type (not FlagBits), unlike its siblings.
static bool ValidateEnumerator(VkCommandBufferUsageFlags const &enumerator) {
    VkCommandBufferUsageFlags allFlags =
        (VkCommandBufferUsageFlags)(VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT | VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT |
                                    VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT);
    if (enumerator & (~allFlags)) {
        return false;
    }

    return true;
}

// Builds a '|'-separated string naming every VkCommandBufferUsageFlags bit set.
static std::string EnumeratorString(VkCommandBufferUsageFlags const &enumerator) {
    if (!ValidateEnumerator(enumerator)) {
        return "unrecognized enumerator";
    }

    std::vector<std::string> strings;
    if (enumerator & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
        strings.push_back("VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT");
    }
    if (enumerator & VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT) {
        strings.push_back("VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT");
    }
    if (enumerator & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT) {
        strings.push_back("VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT");
    }

    // Join with '|' separators.
    std::string enumeratorString;
    for (auto const &string : strings) {
        enumeratorString += string;

        if (string != strings.back()) {
            enumeratorString += '|';
        }
    }

    return enumeratorString;
}

// Returns true iff 'enumerator' contains only known VkCommandBufferResetFlagBits values.
static bool ValidateEnumerator(VkCommandBufferResetFlagBits const &enumerator) {
    VkCommandBufferResetFlagBits allFlags = (VkCommandBufferResetFlagBits)(VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT);
    if (enumerator & (~allFlags)) {
        return false;
    }

    return true;
}

// Builds a '|'-separated string naming every VkCommandBufferResetFlagBits bit set.
static std::string EnumeratorString(VkCommandBufferResetFlagBits const &enumerator) {
    if (!ValidateEnumerator(enumerator)) {
        return "unrecognized enumerator";
    }

    std::vector<std::string> strings;
    if (enumerator & VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT) {
        strings.push_back("VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT");
    }

    // Join with '|' separators (at most one name here, so no separator is emitted).
    std::string enumeratorString;
    for (auto const &string : strings) {
        enumeratorString += string;

        if (string != strings.back()) {
            enumeratorString += '|';
        }
    }

    return enumeratorString;
}

// Returns true iff 'enumerator' contains only known VkImageAspectFlagBits values.
static bool ValidateEnumerator(VkImageAspectFlagBits const &enumerator) {
    VkImageAspectFlagBits allFlags = (VkImageAspectFlagBits)(VK_IMAGE_ASPECT_METADATA_BIT | VK_IMAGE_ASPECT_STENCIL_BIT |
                                                             VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_COLOR_BIT);
    if (enumerator & (~allFlags)) {
        return false;
    }

    return true;
}

// Builds a '|'-separated string naming every VkImageAspectFlagBits bit set.
static std::string EnumeratorString(VkImageAspectFlagBits const &enumerator) {
if (!ValidateEnumerator(enumerator)) {
        return "unrecognized enumerator";
    }

    std::vector<std::string> strings;
    if (enumerator & VK_IMAGE_ASPECT_METADATA_BIT) {
        strings.push_back("VK_IMAGE_ASPECT_METADATA_BIT");
    }
    if (enumerator & VK_IMAGE_ASPECT_STENCIL_BIT) {
        strings.push_back("VK_IMAGE_ASPECT_STENCIL_BIT");
    }
    if (enumerator & VK_IMAGE_ASPECT_DEPTH_BIT) {
        strings.push_back("VK_IMAGE_ASPECT_DEPTH_BIT");
    }
    if (enumerator & VK_IMAGE_ASPECT_COLOR_BIT) {
        strings.push_back("VK_IMAGE_ASPECT_COLOR_BIT");
    }

    // Join with '|' separators.
    std::string enumeratorString;
    for (auto const &string : strings) {
        enumeratorString += string;

        if (string != strings.back()) {
            enumeratorString += '|';
        }
    }

    return enumeratorString;
}

// Returns true iff 'enumerator' contains only known VkQueryControlFlagBits values.
static bool ValidateEnumerator(VkQueryControlFlagBits const &enumerator) {
    VkQueryControlFlagBits allFlags = (VkQueryControlFlagBits)(VK_QUERY_CONTROL_PRECISE_BIT);
    if (enumerator & (~allFlags)) {
        return false;
    }

    return true;
}

// Builds a '|'-separated string naming every VkQueryControlFlagBits bit set.
static std::string EnumeratorString(VkQueryControlFlagBits const &enumerator) {
    if (!ValidateEnumerator(enumerator)) {
        return "unrecognized enumerator";
    }

    std::vector<std::string> strings;
    if (enumerator & VK_QUERY_CONTROL_PRECISE_BIT) {
        strings.push_back("VK_QUERY_CONTROL_PRECISE_BIT");
    }

    // Join with '|' separators (at most one name here, so no separator is emitted).
    std::string enumeratorString;
    for (auto const &string : strings) {
        enumeratorString += string;

        if (string != strings.back()) {
            enumeratorString += '|';
        }
    }

    return enumeratorString;
}

// Upper bound (in bytes) enforced on application-supplied strings.
static const int MaxParamCheckerStringLength = 256;

// Checks that 'validateString' is a well-formed string no longer than
// MaxParamCheckerStringLength.  'apiName' and 'stringName' are used only in the
// error message.  Returns true if the call should be skipped (i.e. an error was
// reported and the callback asked to bail).
static bool validate_string(debug_report_data *report_data, const char *apiName, const char *stringName,
                            const char *validateString) {
    assert(apiName != nullptr);
    assert(stringName != nullptr);
    assert(validateString != nullptr);

    bool skip_call = false;

    VkStringErrorFlags result = vk_string_validate(MaxParamCheckerStringLength, validateString);

    if (result == VK_STRING_ERROR_NONE) {
        return skip_call;
    } else if (result & VK_STRING_ERROR_LENGTH) {
        skip_call =
            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, INVALID_USAGE,
                    LayerName, "%s: string %s exceeds max length %d", apiName, stringName, MaxParamCheckerStringLength);
    } else if (result & VK_STRING_ERROR_BAD_DATA) {
        skip_call =
            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, INVALID_USAGE,
                    LayerName, "%s: string %s contains invalid characters or is badly formed", apiName, stringName);
    }
    return skip_call;
}

// Checks that 'index' is a queue family index recorded at device-creation time
// (and is not VK_QUEUE_FAMILY_IGNORED).  Returns true if the call should be
// skipped.
static bool validate_queue_family_index(layer_data *device_data, const char *function_name, const char *parameter_name,
                                        uint32_t index) {
    assert(device_data != nullptr);
    debug_report_data *report_data = device_data->report_data;
    bool skip_call = false;

    if (index == VK_QUEUE_FAMILY_IGNORED) {
        skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, LayerName,
                             "%s: %s cannot be VK_QUEUE_FAMILY_IGNORED.", function_name, parameter_name);
    } else {
        const auto &queue_data = device_data->queueFamilyIndexMap.find(index);
        if (queue_data == device_data->queueFamilyIndexMap.end()) {
            skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
                                 LayerName, "%s: %s (%d) must be one of the indices specified when the device was created, via "
                                            "the VkDeviceQueueCreateInfo structure.",
                                 function_name, parameter_name, index);
            // NOTE(review): this returns false even though log_msg may have
            // requested a skip via skip_call — the skip request is discarded.
            // Confirm whether callers rely on the unknown-index case never
            // skipping before changing it.
            return false;
        }
    }

    return skip_call;
}

// Array form of validate_queue_family_index: checks every entry of 'indices'.
static bool validate_queue_family_indices(layer_data *device_data, const
char *function_name, const char *parameter_name,
                                          const uint32_t count, const uint32_t *indices) {
    assert(device_data != nullptr);
    debug_report_data *report_data = device_data->report_data;
    bool skip_call = false;

    if (indices != nullptr) {
        for (uint32_t i = 0; i < count; i++) {
            if (indices[i] == VK_QUEUE_FAMILY_IGNORED) {
                skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
                                     LayerName, "%s: %s[%d] cannot be VK_QUEUE_FAMILY_IGNORED.", function_name, parameter_name, i);
            } else {
                const auto &queue_data = device_data->queueFamilyIndexMap.find(indices[i]);
                if (queue_data == device_data->queueFamilyIndexMap.end()) {
                    skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
                                         LayerName, "%s: %s[%d] (%d) must be one of the indices specified when the device was "
                                                    "created, via the VkDeviceQueueCreateInfo structure.",
                                         function_name, parameter_name, i, indices[i]);
                    // NOTE(review): as in validate_queue_family_index, the skip
                    // request accumulated in skip_call is discarded here.
                    return false;
                }
            }
        }
    }

    return skip_call;
}

static void CheckInstanceRegisterExtensions(const VkInstanceCreateInfo *pCreateInfo, VkInstance instance);

// Layer intercept for vkCreateInstance.  Calls down the layer chain first (the
// layer's own bookkeeping cannot exist before the instance does), then sets up
// per-instance state, debug reporting, and temporary callbacks, and finally
// runs parameter validation on pCreateInfo.
VKAPI_ATTR VkResult VKAPI_CALL CreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
                                              VkInstance *pInstance) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;

    VkLayerInstanceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
    assert(chain_info != nullptr);
    assert(chain_info->u.pLayerInfo != nullptr);

    PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
    PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance)fpGetInstanceProcAddr(NULL, "vkCreateInstance");
    if (fpCreateInstance == NULL) {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    // Advance the link info for the next element on the chain
    chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;

    result = fpCreateInstance(pCreateInfo, pAllocator, pInstance);

    if (result == VK_SUCCESS) {
        layer_data *my_instance_data = get_my_data_ptr(get_dispatch_key(*pInstance), layer_data_map);
        assert(my_instance_data != nullptr);

        VkLayerInstanceDispatchTable *pTable = initInstanceTable(*pInstance, fpGetInstanceProcAddr, pc_instance_table_map);

        my_instance_data->instance = *pInstance;
        my_instance_data->report_data = debug_report_create_instance(pTable, *pInstance, pCreateInfo->enabledExtensionCount,
                                                                     pCreateInfo->ppEnabledExtensionNames);

        // Look for one or more debug report create info structures
        // and setup a callback(s) for each one found.
        if (!layer_copy_tmp_callbacks(pCreateInfo->pNext, &my_instance_data->num_tmp_callbacks,
                                      &my_instance_data->tmp_dbg_create_infos, &my_instance_data->tmp_callbacks)) {
            if (my_instance_data->num_tmp_callbacks > 0) {
                // Setup the temporary callback(s) here to catch early issues:
                if (layer_enable_tmp_callbacks(my_instance_data->report_data, my_instance_data->num_tmp_callbacks,
                                               my_instance_data->tmp_dbg_create_infos, my_instance_data->tmp_callbacks)) {
                    // Failure of setting up one or more of the callback.
                    // Therefore, clean up and don't use those callbacks:
                    layer_free_tmp_callbacks(my_instance_data->tmp_dbg_create_infos, my_instance_data->tmp_callbacks);
                    my_instance_data->num_tmp_callbacks = 0;
                }
            }
        }

        init_parameter_validation(my_instance_data, pAllocator);
        CheckInstanceRegisterExtensions(pCreateInfo, *pInstance);

        // Ordinarily we'd check these before calling down the chain, but none of the layer
        // support is in place until now, if we survive we can report the issue now.
        parameter_validation_vkCreateInstance(my_instance_data->report_data, pCreateInfo, pAllocator, pInstance);

        if (pCreateInfo->pApplicationInfo) {
            if (pCreateInfo->pApplicationInfo->pApplicationName) {
                validate_string(my_instance_data->report_data, "vkCreateInstance",
                                "pCreateInfo->VkApplicationInfo->pApplicationName",
                                pCreateInfo->pApplicationInfo->pApplicationName);
            }

            if (pCreateInfo->pApplicationInfo->pEngineName) {
                validate_string(my_instance_data->report_data, "vkCreateInstance", "pCreateInfo->VkApplicationInfo->pEngineName",
                                pCreateInfo->pApplicationInfo->pEngineName);
            }
        }

        // Disable the tmp callbacks:
        if (my_instance_data->num_tmp_callbacks > 0) {
            layer_disable_tmp_callbacks(my_instance_data->report_data, my_instance_data->num_tmp_callbacks,
                                        my_instance_data->tmp_callbacks);
        }
    }

    return result;
}

// Layer intercept for vkDestroyInstance: validates, then destroys the
// downstream instance and tears down this layer's per-instance state.
VKAPI_ATTR void VKAPI_CALL DestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator) {
    // Grab the key before the instance is destroyed.
dispatch_key key = get_dispatch_key(instance);
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(key, layer_data_map);
    assert(my_data != NULL);

    // Enable the temporary callback(s) here to catch vkDestroyInstance issues:
    bool callback_setup = false;
    if (my_data->num_tmp_callbacks > 0) {
        if (!layer_enable_tmp_callbacks(my_data->report_data, my_data->num_tmp_callbacks, my_data->tmp_dbg_create_infos,
                                        my_data->tmp_callbacks)) {
            callback_setup = true;
        }
    }

    skip_call |= parameter_validation_vkDestroyInstance(my_data->report_data, pAllocator);

    // Disable and cleanup the temporary callback(s):
    if (callback_setup) {
        layer_disable_tmp_callbacks(my_data->report_data, my_data->num_tmp_callbacks, my_data->tmp_callbacks);
    }
    if (my_data->num_tmp_callbacks > 0) {
        layer_free_tmp_callbacks(my_data->tmp_dbg_create_infos, my_data->tmp_callbacks);
        my_data->num_tmp_callbacks = 0;
    }

    if (!skip_call) {
        VkLayerInstanceDispatchTable *pTable = get_dispatch_table(pc_instance_table_map, instance);
        pTable->DestroyInstance(instance, pAllocator);

        // Clean up logging callback, if any
        while (my_data->logging_callback.size() > 0) {
            VkDebugReportCallbackEXT callback = my_data->logging_callback.back();
            layer_destroy_msg_callback(my_data->report_data, callback, pAllocator);
            my_data->logging_callback.pop_back();
        }

        layer_debug_report_destroy_instance(mid(instance));
        // NOTE(review): erasing from layer_data_map with 'pTable' looks wrong —
        // the map is keyed by dispatch_key and is erased with 'key' two lines
        // below; this first erase likely removes nothing (or the wrong entry).
        // Confirm against layer_data_map's key type before removing.
        layer_data_map.erase(pTable);

        pc_instance_table_map.erase(key);
        layer_data_map.erase(key);
    }
}

// Layer intercept for vkEnumeratePhysicalDevices: validates parameters, calls
// down the chain, and caches each physical device's supported features.
VKAPI_ATTR VkResult VKAPI_CALL EnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
                                                        VkPhysicalDevice *pPhysicalDevices) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkEnumeratePhysicalDevices(my_data->report_data, pPhysicalDeviceCount, pPhysicalDevices);

    if (!skip_call) {
        result = get_dispatch_table(pc_instance_table_map, instance)
                     ->EnumeratePhysicalDevices(instance, pPhysicalDeviceCount, pPhysicalDevices);

        validate_result(my_data->report_data, "vkEnumeratePhysicalDevices", result);
        if ((result == VK_SUCCESS) && (NULL != pPhysicalDevices)) {
            for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
                layer_data *phy_dev_data = get_my_data_ptr(get_dispatch_key(pPhysicalDevices[i]), layer_data_map);
                // Save the supported features for each physical device
                VkLayerInstanceDispatchTable *disp_table = get_dispatch_table(pc_instance_table_map, pPhysicalDevices[i]);
                disp_table->GetPhysicalDeviceFeatures(pPhysicalDevices[i], &(phy_dev_data->physical_device_features));
            }
        }
    }
    return result;
}

// Layer intercept for vkGetPhysicalDeviceFeatures: validate, then pass through.
VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures *pFeatures) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkGetPhysicalDeviceFeatures(my_data->report_data, pFeatures);

    if (!skip_call) {
        get_dispatch_table(pc_instance_table_map, physicalDevice)->GetPhysicalDeviceFeatures(physicalDevice, pFeatures);
    }
}

// Layer intercept for vkGetPhysicalDeviceFormatProperties: validate, then pass through.
VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format,
                                                             VkFormatProperties *pFormatProperties) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkGetPhysicalDeviceFormatProperties(my_data->report_data, format, pFormatProperties);

    if
(!skip_call) { 1471 get_dispatch_table(pc_instance_table_map, physicalDevice) 1472 ->GetPhysicalDeviceFormatProperties(physicalDevice, format, pFormatProperties); 1473 } 1474} 1475 1476VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, 1477 VkImageType type, VkImageTiling tiling, 1478 VkImageUsageFlags usage, VkImageCreateFlags flags, 1479 VkImageFormatProperties *pImageFormatProperties) { 1480 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 1481 bool skip_call = false; 1482 layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map); 1483 assert(my_data != NULL); 1484 1485 skip_call |= parameter_validation_vkGetPhysicalDeviceImageFormatProperties(my_data->report_data, format, type, tiling, usage, 1486 flags, pImageFormatProperties); 1487 1488 if (!skip_call) { 1489 result = get_dispatch_table(pc_instance_table_map, physicalDevice) 1490 ->GetPhysicalDeviceImageFormatProperties(physicalDevice, format, type, tiling, usage, flags, 1491 pImageFormatProperties); 1492 1493 validate_result(my_data->report_data, "vkGetPhysicalDeviceImageFormatProperties", result); 1494 } 1495 1496 return result; 1497} 1498 1499VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties *pProperties) { 1500 bool skip_call = false; 1501 layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map); 1502 assert(my_data != NULL); 1503 1504 skip_call |= parameter_validation_vkGetPhysicalDeviceProperties(my_data->report_data, pProperties); 1505 1506 if (!skip_call) { 1507 get_dispatch_table(pc_instance_table_map, physicalDevice)->GetPhysicalDeviceProperties(physicalDevice, pProperties); 1508 } 1509} 1510 1511VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice, 1512 uint32_t *pQueueFamilyPropertyCount, 1513 VkQueueFamilyProperties *pQueueFamilyProperties) { 1514 bool 
skip_call = false; 1515 layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map); 1516 assert(my_data != NULL); 1517 1518 skip_call |= parameter_validation_vkGetPhysicalDeviceQueueFamilyProperties(my_data->report_data, pQueueFamilyPropertyCount, 1519 pQueueFamilyProperties); 1520 1521 if (!skip_call) { 1522 get_dispatch_table(pc_instance_table_map, physicalDevice) 1523 ->GetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties); 1524 } 1525} 1526 1527VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice, 1528 VkPhysicalDeviceMemoryProperties *pMemoryProperties) { 1529 bool skip_call = false; 1530 layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map); 1531 assert(my_data != NULL); 1532 1533 skip_call |= parameter_validation_vkGetPhysicalDeviceMemoryProperties(my_data->report_data, pMemoryProperties); 1534 1535 if (!skip_call) { 1536 get_dispatch_table(pc_instance_table_map, physicalDevice) 1537 ->GetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties); 1538 } 1539} 1540 1541void validateDeviceCreateInfo(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo, 1542 const std::vector<VkQueueFamilyProperties> properties) { 1543 std::unordered_set<uint32_t> set; 1544 1545 if ((pCreateInfo != nullptr) && (pCreateInfo->pQueueCreateInfos != nullptr)) { 1546 for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) { 1547 if (set.count(pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex)) { 1548 log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 1549 INVALID_USAGE, LayerName, 1550 "VkDeviceCreateInfo parameter, uint32_t pQueueCreateInfos[%d]->queueFamilyIndex, is not unique within this " 1551 "structure.", 1552 i); 1553 } else { 1554 set.insert(pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex); 1555 } 1556 1557 if 
(pCreateInfo->pQueueCreateInfos[i].pQueuePriorities != nullptr) {
                // Each queue priority must lie in the normalized range [0, 1].
                for (uint32_t j = 0; j < pCreateInfo->pQueueCreateInfos[i].queueCount; ++j) {
                    if ((pCreateInfo->pQueueCreateInfos[i].pQueuePriorities[j] < 0.f) ||
                        (pCreateInfo->pQueueCreateInfos[i].pQueuePriorities[j] > 1.f)) {
                        log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                __LINE__, INVALID_USAGE, LayerName,
                                "VkDeviceCreateInfo parameter, uint32_t pQueueCreateInfos[%d]->pQueuePriorities[%d], must be "
                                "between 0 and 1. Actual value is %f",
                                i, j, pCreateInfo->pQueueCreateInfos[i].pQueuePriorities[j]);
                    }
                }
            }

            // The family index must be in range, and the requested queue count
            // must not exceed that family's advertised queue count.
            if (pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex >= properties.size()) {
                log_msg(
                    mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                    INVALID_USAGE, LayerName,
                    "VkDeviceCreateInfo parameter, uint32_t pQueueCreateInfos[%d]->queueFamilyIndex cannot be more than the number "
                    "of queue families.",
                    i);
            } else if (pCreateInfo->pQueueCreateInfos[i].queueCount >
                       properties[pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex].queueCount) {
                log_msg(
                    mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                    INVALID_USAGE, LayerName,
                    "VkDeviceCreateInfo parameter, uint32_t pQueueCreateInfos[%d]->queueCount cannot be more than the number of "
                    "queues for the given family index.",
                    i);
            }
        }
    }
}

// Records, per instance dispatch table, which WSI-related instance extensions
// the application enabled at vkCreateInstance time.
static void CheckInstanceRegisterExtensions(const VkInstanceCreateInfo *pCreateInfo, VkInstance instance) {
    VkLayerInstanceDispatchTable *dispatch_table = get_dispatch_table(pc_instance_table_map, instance);

    instance_extension_map[dispatch_table] = {};

    for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SURFACE_EXTENSION_NAME) == 0) {
            instance_extension_map[dispatch_table].wsi_enabled = true;
        }
#ifdef VK_USE_PLATFORM_XLIB_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XLIB_SURFACE_EXTENSION_NAME) == 0) {
            instance_extension_map[dispatch_table].xlib_enabled = true;
        }
#endif
#ifdef VK_USE_PLATFORM_XCB_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XCB_SURFACE_EXTENSION_NAME) == 0) {
            instance_extension_map[dispatch_table].xcb_enabled = true;
        }
#endif
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME) == 0) {
            instance_extension_map[dispatch_table].wayland_enabled = true;
        }
#endif
#ifdef VK_USE_PLATFORM_MIR_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_MIR_SURFACE_EXTENSION_NAME) == 0) {
            instance_extension_map[dispatch_table].mir_enabled = true;
        }
#endif
#ifdef VK_USE_PLATFORM_ANDROID_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_ANDROID_SURFACE_EXTENSION_NAME) == 0) {
            instance_extension_map[dispatch_table].android_enabled = true;
        }
#endif
#ifdef VK_USE_PLATFORM_WIN32_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WIN32_SURFACE_EXTENSION_NAME) == 0) {
            instance_extension_map[dispatch_table].win32_enabled = true;
        }
#endif
    }
}

// Records whether the application enabled VK_KHR_swapchain on this device.
static void CheckDeviceRegisterExtensions(const VkDeviceCreateInfo *pCreateInfo, VkDevice device) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    device_data->wsi_enabled = false;

    for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SWAPCHAIN_EXTENSION_NAME) == 0) {
            device_data->wsi_enabled = true;
        }
    }
}

// Remembers, per device, the queueFamilyIndex -> queueCount pairs requested at
// device-creation time (consumed later by the queue-index validators).
void storeCreateDeviceData(VkDevice device, const
VkDeviceCreateInfo *pCreateInfo) {
    layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);

    if ((pCreateInfo != nullptr) && (pCreateInfo->pQueueCreateInfos != nullptr)) {
        for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
            my_device_data->queueFamilyIndexMap.insert(
                std::make_pair(pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex, pCreateInfo->pQueueCreateInfos[i].queueCount));
        }
    }
}

// Layer intercept for vkCreateDevice.  Validates pCreateInfo (including enabled
// layer/extension name strings), calls down the chain, then initializes this
// layer's per-device state: dispatch table, enabled extensions, queue family
// bookkeeping, cached device limits and enabled features.
VKAPI_ATTR VkResult VKAPI_CALL CreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo,
                                            const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) {
    /*
     * NOTE: We do not validate physicalDevice or any dispatchable
     * object as the first parameter. We couldn't get here if it was wrong!
     */

    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
    bool skip_call = false;
    layer_data *my_instance_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
    assert(my_instance_data != nullptr);

    skip_call |= parameter_validation_vkCreateDevice(my_instance_data->report_data, pCreateInfo, pAllocator, pDevice);

    if (pCreateInfo != NULL) {
        if ((pCreateInfo->enabledLayerCount > 0) && (pCreateInfo->ppEnabledLayerNames != NULL)) {
            for (size_t i = 0; i < pCreateInfo->enabledLayerCount; i++) {
                skip_call |= validate_string(my_instance_data->report_data, "vkCreateDevice", "pCreateInfo->ppEnabledLayerNames",
                                             pCreateInfo->ppEnabledLayerNames[i]);
            }
        }

        if ((pCreateInfo->enabledExtensionCount > 0) && (pCreateInfo->ppEnabledExtensionNames != NULL)) {
            for (size_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
                skip_call |= validate_string(my_instance_data->report_data, "vkCreateDevice",
                                             "pCreateInfo->ppEnabledExtensionNames", pCreateInfo->ppEnabledExtensionNames[i]);
            }
        }
    }

    if (!skip_call) {
        VkLayerDeviceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
        assert(chain_info != nullptr);
        assert(chain_info->u.pLayerInfo != nullptr);

        PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
        PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr = chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
        PFN_vkCreateDevice fpCreateDevice = (PFN_vkCreateDevice)fpGetInstanceProcAddr(my_instance_data->instance, "vkCreateDevice");
        if (fpCreateDevice == NULL) {
            return VK_ERROR_INITIALIZATION_FAILED;
        }

        // Advance the link info for the next element on the chain
        chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;

        result = fpCreateDevice(physicalDevice, pCreateInfo, pAllocator, pDevice);

        validate_result(my_instance_data->report_data, "vkCreateDevice", result);

        if (result == VK_SUCCESS) {
            layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(*pDevice), layer_data_map);
            assert(my_device_data != nullptr);

            my_device_data->report_data = layer_debug_report_create_device(my_instance_data->report_data, *pDevice);
            initDeviceTable(*pDevice, fpGetDeviceProcAddr, pc_device_table_map);

            CheckDeviceRegisterExtensions(pCreateInfo, *pDevice);

            // Fetch the queue family properties (count pass, then fill pass)
            // so the create info can be cross-checked against them.
            uint32_t count;
            VkLayerInstanceDispatchTable *instance_dispatch_table = get_dispatch_table(pc_instance_table_map, physicalDevice);
            instance_dispatch_table->GetPhysicalDeviceQueueFamilyProperties(physicalDevice, &count, nullptr);
            std::vector<VkQueueFamilyProperties> properties(count);
            instance_dispatch_table->GetPhysicalDeviceQueueFamilyProperties(physicalDevice, &count, &properties[0]);

            validateDeviceCreateInfo(physicalDevice, pCreateInfo, properties);
            storeCreateDeviceData(*pDevice, pCreateInfo);

            // Query and save physical device limits for this device
            VkPhysicalDeviceProperties device_properties = {};
instance_dispatch_table->GetPhysicalDeviceProperties(physicalDevice, &device_properties); 1724 memcpy(&my_device_data->device_limits, &device_properties.limits, sizeof(VkPhysicalDeviceLimits)); 1725 my_device_data->physical_device = physicalDevice; 1726 1727 // Save app-enabled features in this device's layer_data structure 1728 if (pCreateInfo->pEnabledFeatures) { 1729 my_device_data->physical_device_features = *pCreateInfo->pEnabledFeatures; 1730 } else { 1731 memset(&my_device_data->physical_device_features, 0, sizeof(VkPhysicalDeviceFeatures)); 1732 } 1733 } 1734 } 1735 1736 return result; 1737} 1738 1739VKAPI_ATTR void VKAPI_CALL DestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) { 1740 dispatch_key key = get_dispatch_key(device); 1741 bool skip_call = false; 1742 layer_data *my_data = get_my_data_ptr(key, layer_data_map); 1743 assert(my_data != NULL); 1744 1745 skip_call |= parameter_validation_vkDestroyDevice(my_data->report_data, pAllocator); 1746 1747 if (!skip_call) { 1748 layer_debug_report_destroy_device(device); 1749 1750#if DISPATCH_MAP_DEBUG 1751 fprintf(stderr, "Device: 0x%p, key: 0x%p\n", device, key); 1752#endif 1753 1754 get_dispatch_table(pc_device_table_map, device)->DestroyDevice(device, pAllocator); 1755 pc_device_table_map.erase(key); 1756 layer_data_map.erase(key); 1757 } 1758} 1759 1760bool PreGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex) { 1761 layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1762 assert(my_device_data != nullptr); 1763 1764 validate_queue_family_index(my_device_data, "vkGetDeviceQueue", "queueFamilyIndex", queueFamilyIndex); 1765 1766 const auto &queue_data = my_device_data->queueFamilyIndexMap.find(queueFamilyIndex); 1767 if (queue_data->second <= queueIndex) { 1768 log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, INVALID_USAGE, 1769 LayerName, 1770 "VkGetDeviceQueue 
parameter, uint32_t queueIndex %d, must be less than the number of queues given when the device " 1771 "was created.", 1772 queueIndex); 1773 return false; 1774 } 1775 return true; 1776} 1777 1778VKAPI_ATTR void VKAPI_CALL GetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue *pQueue) { 1779 bool skip_call = false; 1780 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1781 assert(my_data != NULL); 1782 1783 skip_call |= parameter_validation_vkGetDeviceQueue(my_data->report_data, queueFamilyIndex, queueIndex, pQueue); 1784 1785 if (!skip_call) { 1786 PreGetDeviceQueue(device, queueFamilyIndex, queueIndex); 1787 1788 get_dispatch_table(pc_device_table_map, device)->GetDeviceQueue(device, queueFamilyIndex, queueIndex, pQueue); 1789 } 1790} 1791 1792VKAPI_ATTR VkResult VKAPI_CALL QueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence) { 1793 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 1794 bool skip_call = false; 1795 layer_data *my_data = get_my_data_ptr(get_dispatch_key(queue), layer_data_map); 1796 assert(my_data != NULL); 1797 1798 skip_call |= parameter_validation_vkQueueSubmit(my_data->report_data, submitCount, pSubmits, fence); 1799 1800 if (!skip_call) { 1801 result = get_dispatch_table(pc_device_table_map, queue)->QueueSubmit(queue, submitCount, pSubmits, fence); 1802 1803 validate_result(my_data->report_data, "vkQueueSubmit", result); 1804 } 1805 1806 return result; 1807} 1808 1809VKAPI_ATTR VkResult VKAPI_CALL QueueWaitIdle(VkQueue queue) { 1810 layer_data *my_data = get_my_data_ptr(get_dispatch_key(queue), layer_data_map); 1811 assert(my_data != NULL); 1812 1813 VkResult result = get_dispatch_table(pc_device_table_map, queue)->QueueWaitIdle(queue); 1814 1815 validate_result(my_data->report_data, "vkQueueWaitIdle", result); 1816 1817 return result; 1818} 1819 1820VKAPI_ATTR VkResult VKAPI_CALL DeviceWaitIdle(VkDevice device) { 1821 layer_data 
*my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1822 assert(my_data != NULL); 1823 1824 VkResult result = get_dispatch_table(pc_device_table_map, device)->DeviceWaitIdle(device); 1825 1826 validate_result(my_data->report_data, "vkDeviceWaitIdle", result); 1827 1828 return result; 1829} 1830 1831VKAPI_ATTR VkResult VKAPI_CALL AllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo, 1832 const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory) { 1833 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 1834 bool skip_call = false; 1835 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1836 assert(my_data != NULL); 1837 1838 skip_call |= parameter_validation_vkAllocateMemory(my_data->report_data, pAllocateInfo, pAllocator, pMemory); 1839 1840 if (!skip_call) { 1841 result = get_dispatch_table(pc_device_table_map, device)->AllocateMemory(device, pAllocateInfo, pAllocator, pMemory); 1842 1843 validate_result(my_data->report_data, "vkAllocateMemory", result); 1844 } 1845 1846 return result; 1847} 1848 1849VKAPI_ATTR void VKAPI_CALL FreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks *pAllocator) { 1850 bool skip_call = false; 1851 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1852 assert(my_data != NULL); 1853 1854 skip_call |= parameter_validation_vkFreeMemory(my_data->report_data, memory, pAllocator); 1855 1856 if (!skip_call) { 1857 get_dispatch_table(pc_device_table_map, device)->FreeMemory(device, memory, pAllocator); 1858 } 1859} 1860 1861VKAPI_ATTR VkResult VKAPI_CALL MapMemory(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, 1862 VkMemoryMapFlags flags, void **ppData) { 1863 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 1864 bool skip_call = false; 1865 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1866 assert(my_data != NULL); 1867 1868 skip_call |= 
parameter_validation_vkMapMemory(my_data->report_data, memory, offset, size, flags, ppData); 1869 1870 if (!skip_call) { 1871 result = get_dispatch_table(pc_device_table_map, device)->MapMemory(device, memory, offset, size, flags, ppData); 1872 1873 validate_result(my_data->report_data, "vkMapMemory", result); 1874 } 1875 1876 return result; 1877} 1878 1879VKAPI_ATTR void VKAPI_CALL UnmapMemory(VkDevice device, VkDeviceMemory memory) { 1880 bool skip_call = false; 1881 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1882 assert(my_data != NULL); 1883 1884 skip_call |= parameter_validation_vkUnmapMemory(my_data->report_data, memory); 1885 1886 if (!skip_call) { 1887 get_dispatch_table(pc_device_table_map, device)->UnmapMemory(device, memory); 1888 } 1889} 1890 1891VKAPI_ATTR VkResult VKAPI_CALL FlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, 1892 const VkMappedMemoryRange *pMemoryRanges) { 1893 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 1894 bool skip_call = false; 1895 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1896 assert(my_data != NULL); 1897 1898 skip_call |= parameter_validation_vkFlushMappedMemoryRanges(my_data->report_data, memoryRangeCount, pMemoryRanges); 1899 1900 if (!skip_call) { 1901 result = get_dispatch_table(pc_device_table_map, device)->FlushMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges); 1902 1903 validate_result(my_data->report_data, "vkFlushMappedMemoryRanges", result); 1904 } 1905 1906 return result; 1907} 1908 1909VKAPI_ATTR VkResult VKAPI_CALL InvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, 1910 const VkMappedMemoryRange *pMemoryRanges) { 1911 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 1912 bool skip_call = false; 1913 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1914 assert(my_data != NULL); 1915 1916 skip_call |= 
parameter_validation_vkInvalidateMappedMemoryRanges(my_data->report_data, memoryRangeCount, pMemoryRanges); 1917 1918 if (!skip_call) { 1919 result = 1920 get_dispatch_table(pc_device_table_map, device)->InvalidateMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges); 1921 1922 validate_result(my_data->report_data, "vkInvalidateMappedMemoryRanges", result); 1923 } 1924 1925 return result; 1926} 1927 1928VKAPI_ATTR void VKAPI_CALL GetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory, 1929 VkDeviceSize *pCommittedMemoryInBytes) { 1930 bool skip_call = false; 1931 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1932 assert(my_data != NULL); 1933 1934 skip_call |= parameter_validation_vkGetDeviceMemoryCommitment(my_data->report_data, memory, pCommittedMemoryInBytes); 1935 1936 if (!skip_call) { 1937 get_dispatch_table(pc_device_table_map, device)->GetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes); 1938 } 1939} 1940 1941VKAPI_ATTR VkResult VKAPI_CALL BindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, 1942 VkDeviceSize memoryOffset) { 1943 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 1944 bool skip_call = false; 1945 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1946 assert(my_data != NULL); 1947 1948 skip_call |= parameter_validation_vkBindBufferMemory(my_data->report_data, buffer, memory, memoryOffset); 1949 1950 if (!skip_call) { 1951 result = get_dispatch_table(pc_device_table_map, device)->BindBufferMemory(device, buffer, memory, memoryOffset); 1952 1953 validate_result(my_data->report_data, "vkBindBufferMemory", result); 1954 } 1955 1956 return result; 1957} 1958 1959VKAPI_ATTR VkResult VKAPI_CALL BindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset) { 1960 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 1961 bool skip_call = false; 1962 layer_data *my_data = 
get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1963 assert(my_data != NULL); 1964 1965 skip_call |= parameter_validation_vkBindImageMemory(my_data->report_data, image, memory, memoryOffset); 1966 1967 if (!skip_call) { 1968 result = get_dispatch_table(pc_device_table_map, device)->BindImageMemory(device, image, memory, memoryOffset); 1969 1970 validate_result(my_data->report_data, "vkBindImageMemory", result); 1971 } 1972 1973 return result; 1974} 1975 1976VKAPI_ATTR void VKAPI_CALL GetBufferMemoryRequirements(VkDevice device, VkBuffer buffer, 1977 VkMemoryRequirements *pMemoryRequirements) { 1978 bool skip_call = false; 1979 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1980 assert(my_data != NULL); 1981 1982 skip_call |= parameter_validation_vkGetBufferMemoryRequirements(my_data->report_data, buffer, pMemoryRequirements); 1983 1984 if (!skip_call) { 1985 get_dispatch_table(pc_device_table_map, device)->GetBufferMemoryRequirements(device, buffer, pMemoryRequirements); 1986 } 1987} 1988 1989VKAPI_ATTR void VKAPI_CALL GetImageMemoryRequirements(VkDevice device, VkImage image, VkMemoryRequirements *pMemoryRequirements) { 1990 bool skip_call = false; 1991 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 1992 assert(my_data != NULL); 1993 1994 skip_call |= parameter_validation_vkGetImageMemoryRequirements(my_data->report_data, image, pMemoryRequirements); 1995 1996 if (!skip_call) { 1997 get_dispatch_table(pc_device_table_map, device)->GetImageMemoryRequirements(device, image, pMemoryRequirements); 1998 } 1999} 2000 2001bool PostGetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t *pNumRequirements, 2002 VkSparseImageMemoryRequirements *pSparseMemoryRequirements) { 2003 if (pSparseMemoryRequirements != nullptr) { 2004 if ((pSparseMemoryRequirements->formatProperties.aspectMask & 2005 (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT | 
2006 VK_IMAGE_ASPECT_METADATA_BIT)) == 0) { 2007 log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 2008 UNRECOGNIZED_VALUE, LayerName, 2009 "vkGetImageSparseMemoryRequirements parameter, VkImageAspect " 2010 "pSparseMemoryRequirements->formatProperties.aspectMask, is an unrecognized enumerator"); 2011 return false; 2012 } 2013 } 2014 2015 return true; 2016} 2017 2018VKAPI_ATTR void VKAPI_CALL GetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount, 2019 VkSparseImageMemoryRequirements *pSparseMemoryRequirements) { 2020 bool skip_call = false; 2021 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2022 assert(my_data != NULL); 2023 2024 skip_call |= parameter_validation_vkGetImageSparseMemoryRequirements(my_data->report_data, image, pSparseMemoryRequirementCount, 2025 pSparseMemoryRequirements); 2026 2027 if (!skip_call) { 2028 get_dispatch_table(pc_device_table_map, device) 2029 ->GetImageSparseMemoryRequirements(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements); 2030 2031 PostGetImageSparseMemoryRequirements(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements); 2032 } 2033} 2034 2035bool PostGetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, 2036 VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, 2037 uint32_t *pNumProperties, VkSparseImageFormatProperties *pProperties) { 2038 if (pProperties != nullptr) { 2039 if ((pProperties->aspectMask & (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT | 2040 VK_IMAGE_ASPECT_METADATA_BIT)) == 0) { 2041 log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 1, 2042 LayerName, 2043 "vkGetPhysicalDeviceSparseImageFormatProperties parameter, VkImageAspect 
pProperties->aspectMask, is an " 2044 "unrecognized enumerator"); 2045 return false; 2046 } 2047 } 2048 2049 return true; 2050} 2051 2052VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, 2053 VkImageType type, VkSampleCountFlagBits samples, 2054 VkImageUsageFlags usage, VkImageTiling tiling, 2055 uint32_t *pPropertyCount, 2056 VkSparseImageFormatProperties *pProperties) { 2057 bool skip_call = false; 2058 layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map); 2059 assert(my_data != NULL); 2060 2061 skip_call |= parameter_validation_vkGetPhysicalDeviceSparseImageFormatProperties(my_data->report_data, format, type, samples, 2062 usage, tiling, pPropertyCount, pProperties); 2063 2064 if (!skip_call) { 2065 get_dispatch_table(pc_instance_table_map, physicalDevice) 2066 ->GetPhysicalDeviceSparseImageFormatProperties(physicalDevice, format, type, samples, usage, tiling, pPropertyCount, 2067 pProperties); 2068 2069 PostGetPhysicalDeviceSparseImageFormatProperties(physicalDevice, format, type, samples, usage, tiling, pPropertyCount, 2070 pProperties); 2071 } 2072} 2073 2074VKAPI_ATTR VkResult VKAPI_CALL QueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo, 2075 VkFence fence) { 2076 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2077 bool skip_call = false; 2078 layer_data *my_data = get_my_data_ptr(get_dispatch_key(queue), layer_data_map); 2079 assert(my_data != NULL); 2080 2081 skip_call |= parameter_validation_vkQueueBindSparse(my_data->report_data, bindInfoCount, pBindInfo, fence); 2082 2083 if (!skip_call) { 2084 result = get_dispatch_table(pc_device_table_map, queue)->QueueBindSparse(queue, bindInfoCount, pBindInfo, fence); 2085 2086 validate_result(my_data->report_data, "vkQueueBindSparse", result); 2087 } 2088 2089 return result; 2090} 2091 2092VKAPI_ATTR VkResult VKAPI_CALL CreateFence(VkDevice device, const 
VkFenceCreateInfo *pCreateInfo, 2093 const VkAllocationCallbacks *pAllocator, VkFence *pFence) { 2094 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2095 bool skip_call = false; 2096 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2097 assert(my_data != NULL); 2098 2099 skip_call |= parameter_validation_vkCreateFence(my_data->report_data, pCreateInfo, pAllocator, pFence); 2100 2101 if (!skip_call) { 2102 result = get_dispatch_table(pc_device_table_map, device)->CreateFence(device, pCreateInfo, pAllocator, pFence); 2103 2104 validate_result(my_data->report_data, "vkCreateFence", result); 2105 } 2106 2107 return result; 2108} 2109 2110VKAPI_ATTR void VKAPI_CALL DestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) { 2111 bool skip_call = false; 2112 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2113 assert(my_data != NULL); 2114 2115 skip_call |= parameter_validation_vkDestroyFence(my_data->report_data, fence, pAllocator); 2116 2117 if (!skip_call) { 2118 get_dispatch_table(pc_device_table_map, device)->DestroyFence(device, fence, pAllocator); 2119 } 2120} 2121 2122VKAPI_ATTR VkResult VKAPI_CALL ResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences) { 2123 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2124 bool skip_call = false; 2125 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2126 assert(my_data != NULL); 2127 2128 skip_call |= parameter_validation_vkResetFences(my_data->report_data, fenceCount, pFences); 2129 2130 if (!skip_call) { 2131 result = get_dispatch_table(pc_device_table_map, device)->ResetFences(device, fenceCount, pFences); 2132 2133 validate_result(my_data->report_data, "vkResetFences", result); 2134 } 2135 2136 return result; 2137} 2138 2139VKAPI_ATTR VkResult VKAPI_CALL GetFenceStatus(VkDevice device, VkFence fence) { 2140 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2141 bool 
skip_call = false; 2142 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2143 assert(my_data != NULL); 2144 2145 skip_call |= parameter_validation_vkGetFenceStatus(my_data->report_data, fence); 2146 2147 if (!skip_call) { 2148 result = get_dispatch_table(pc_device_table_map, device)->GetFenceStatus(device, fence); 2149 2150 validate_result(my_data->report_data, "vkGetFenceStatus", result); 2151 } 2152 2153 return result; 2154} 2155 2156VKAPI_ATTR VkResult VKAPI_CALL WaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences, VkBool32 waitAll, 2157 uint64_t timeout) { 2158 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2159 bool skip_call = false; 2160 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2161 assert(my_data != NULL); 2162 2163 skip_call |= parameter_validation_vkWaitForFences(my_data->report_data, fenceCount, pFences, waitAll, timeout); 2164 2165 if (!skip_call) { 2166 result = get_dispatch_table(pc_device_table_map, device)->WaitForFences(device, fenceCount, pFences, waitAll, timeout); 2167 2168 validate_result(my_data->report_data, "vkWaitForFences", result); 2169 } 2170 2171 return result; 2172} 2173 2174VKAPI_ATTR VkResult VKAPI_CALL CreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo, 2175 const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore) { 2176 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2177 bool skip_call = false; 2178 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2179 assert(my_data != NULL); 2180 2181 skip_call |= parameter_validation_vkCreateSemaphore(my_data->report_data, pCreateInfo, pAllocator, pSemaphore); 2182 2183 if (!skip_call) { 2184 result = get_dispatch_table(pc_device_table_map, device)->CreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore); 2185 2186 validate_result(my_data->report_data, "vkCreateSemaphore", result); 2187 } 2188 2189 return result; 2190} 2191 
2192VKAPI_ATTR void VKAPI_CALL DestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks *pAllocator) { 2193 bool skip_call = false; 2194 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2195 assert(my_data != NULL); 2196 2197 skip_call |= parameter_validation_vkDestroySemaphore(my_data->report_data, semaphore, pAllocator); 2198 2199 if (!skip_call) { 2200 get_dispatch_table(pc_device_table_map, device)->DestroySemaphore(device, semaphore, pAllocator); 2201 } 2202} 2203 2204VKAPI_ATTR VkResult VKAPI_CALL CreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo, 2205 const VkAllocationCallbacks *pAllocator, VkEvent *pEvent) { 2206 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2207 bool skip_call = false; 2208 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2209 assert(my_data != NULL); 2210 2211 skip_call |= parameter_validation_vkCreateEvent(my_data->report_data, pCreateInfo, pAllocator, pEvent); 2212 2213 if (!skip_call) { 2214 result = get_dispatch_table(pc_device_table_map, device)->CreateEvent(device, pCreateInfo, pAllocator, pEvent); 2215 2216 validate_result(my_data->report_data, "vkCreateEvent", result); 2217 } 2218 2219 return result; 2220} 2221 2222VKAPI_ATTR void VKAPI_CALL DestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) { 2223 bool skip_call = false; 2224 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2225 assert(my_data != NULL); 2226 2227 skip_call |= parameter_validation_vkDestroyEvent(my_data->report_data, event, pAllocator); 2228 2229 if (!skip_call) { 2230 get_dispatch_table(pc_device_table_map, device)->DestroyEvent(device, event, pAllocator); 2231 } 2232} 2233 2234VKAPI_ATTR VkResult VKAPI_CALL GetEventStatus(VkDevice device, VkEvent event) { 2235 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2236 bool skip_call = false; 2237 layer_data *my_data = 
get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2238 assert(my_data != NULL); 2239 2240 skip_call |= parameter_validation_vkGetEventStatus(my_data->report_data, event); 2241 2242 if (!skip_call) { 2243 result = get_dispatch_table(pc_device_table_map, device)->GetEventStatus(device, event); 2244 2245 validate_result(my_data->report_data, "vkGetEventStatus", result); 2246 } 2247 2248 return result; 2249} 2250 2251VKAPI_ATTR VkResult VKAPI_CALL SetEvent(VkDevice device, VkEvent event) { 2252 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2253 bool skip_call = false; 2254 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2255 assert(my_data != NULL); 2256 2257 skip_call |= parameter_validation_vkSetEvent(my_data->report_data, event); 2258 2259 if (!skip_call) { 2260 result = get_dispatch_table(pc_device_table_map, device)->SetEvent(device, event); 2261 2262 validate_result(my_data->report_data, "vkSetEvent", result); 2263 } 2264 2265 return result; 2266} 2267 2268VKAPI_ATTR VkResult VKAPI_CALL ResetEvent(VkDevice device, VkEvent event) { 2269 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2270 bool skip_call = false; 2271 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2272 assert(my_data != NULL); 2273 2274 skip_call |= parameter_validation_vkResetEvent(my_data->report_data, event); 2275 2276 if (!skip_call) { 2277 result = get_dispatch_table(pc_device_table_map, device)->ResetEvent(device, event); 2278 2279 validate_result(my_data->report_data, "vkResetEvent", result); 2280 } 2281 2282 return result; 2283} 2284 2285VKAPI_ATTR VkResult VKAPI_CALL CreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo, 2286 const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool) { 2287 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2288 bool skip_call = false; 2289 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2290 assert(device_data != 
nullptr); 2291 debug_report_data *report_data = device_data->report_data; 2292 2293 skip_call |= parameter_validation_vkCreateQueryPool(device_data->report_data, pCreateInfo, pAllocator, pQueryPool); 2294 2295 // Validation for parameters excluded from the generated validation code due to a 'noautovalidity' tag in vk.xml 2296 if (pCreateInfo != nullptr) { 2297 // If queryType is VK_QUERY_TYPE_PIPELINE_STATISTICS, pipelineStatistics must be a valid combination of 2298 // VkQueryPipelineStatisticFlagBits values 2299 if ((pCreateInfo->queryType == VK_QUERY_TYPE_PIPELINE_STATISTICS) && (pCreateInfo->pipelineStatistics != 0) && 2300 ((pCreateInfo->pipelineStatistics & (~AllVkQueryPipelineStatisticFlagBits)) != 0)) { 2301 skip_call |= 2302 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 2303 UNRECOGNIZED_VALUE, LayerName, "vkCreateQueryPool: if pCreateInfo->queryType is " 2304 "VK_QUERY_TYPE_PIPELINE_STATISTICS, pCreateInfo->pipelineStatistics must be " 2305 "a valid combination of VkQueryPipelineStatisticFlagBits values"); 2306 } 2307 } 2308 2309 if (!skip_call) { 2310 result = get_dispatch_table(pc_device_table_map, device)->CreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool); 2311 2312 validate_result(report_data, "vkCreateQueryPool", result); 2313 } 2314 2315 return result; 2316} 2317 2318VKAPI_ATTR void VKAPI_CALL DestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks *pAllocator) { 2319 bool skip_call = false; 2320 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2321 assert(my_data != NULL); 2322 2323 skip_call |= parameter_validation_vkDestroyQueryPool(my_data->report_data, queryPool, pAllocator); 2324 2325 if (!skip_call) { 2326 get_dispatch_table(pc_device_table_map, device)->DestroyQueryPool(device, queryPool, pAllocator); 2327 } 2328} 2329 2330VKAPI_ATTR VkResult VKAPI_CALL GetQueryPoolResults(VkDevice device, VkQueryPool queryPool, 
uint32_t firstQuery, uint32_t queryCount, 2331 size_t dataSize, void *pData, VkDeviceSize stride, VkQueryResultFlags flags) { 2332 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2333 bool skip_call = false; 2334 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2335 assert(my_data != NULL); 2336 2337 skip_call |= parameter_validation_vkGetQueryPoolResults(my_data->report_data, queryPool, firstQuery, queryCount, dataSize, 2338 pData, stride, flags); 2339 2340 if (!skip_call) { 2341 result = get_dispatch_table(pc_device_table_map, device) 2342 ->GetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags); 2343 2344 validate_result(my_data->report_data, "vkGetQueryPoolResults", result); 2345 } 2346 2347 return result; 2348} 2349 2350VKAPI_ATTR VkResult VKAPI_CALL CreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo, 2351 const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer) { 2352 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2353 bool skip_call = false; 2354 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2355 assert(device_data != nullptr); 2356 debug_report_data *report_data = device_data->report_data; 2357 2358 skip_call |= parameter_validation_vkCreateBuffer(report_data, pCreateInfo, pAllocator, pBuffer); 2359 2360 if (pCreateInfo != nullptr) { 2361 // Validation for parameters excluded from the generated validation code due to a 'noautovalidity' tag in vk.xml 2362 if (pCreateInfo->sharingMode == VK_SHARING_MODE_CONCURRENT) { 2363 // If sharingMode is VK_SHARING_MODE_CONCURRENT, queueFamilyIndexCount must be greater than 1 2364 if (pCreateInfo->queueFamilyIndexCount <= 1) { 2365 skip_call |= 2366 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 2367 INVALID_USAGE, LayerName, "vkCreateBuffer: if pCreateInfo->sharingMode is VK_SHARING_MODE_CONCURRENT, " 2368 
"pCreateInfo->queueFamilyIndexCount must be greater than 1"); 2369 } 2370 2371 // If sharingMode is VK_SHARING_MODE_CONCURRENT, pQueueFamilyIndices must be a pointer to an array of 2372 // queueFamilyIndexCount uint32_t values 2373 if (pCreateInfo->pQueueFamilyIndices == nullptr) { 2374 skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, 2375 __LINE__, REQUIRED_PARAMETER, LayerName, 2376 "vkCreateBuffer: if pCreateInfo->sharingMode is VK_SHARING_MODE_CONCURRENT, " 2377 "pCreateInfo->pQueueFamilyIndices must be a pointer to an array of " 2378 "pCreateInfo->queueFamilyIndexCount uint32_t values"); 2379 } 2380 2381 // Ensure that the queue family indices were specified at device creation 2382 skip_call |= validate_queue_family_indices(device_data, "vkCreateBuffer", "pCreateInfo->pQueueFamilyIndices", 2383 pCreateInfo->queueFamilyIndexCount, pCreateInfo->pQueueFamilyIndices); 2384 } 2385 } 2386 2387 if (!skip_call) { 2388 result = get_dispatch_table(pc_device_table_map, device)->CreateBuffer(device, pCreateInfo, pAllocator, pBuffer); 2389 2390 validate_result(report_data, "vkCreateBuffer", result); 2391 } 2392 2393 return result; 2394} 2395 2396VKAPI_ATTR void VKAPI_CALL DestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) { 2397 bool skip_call = false; 2398 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2399 assert(my_data != NULL); 2400 2401 skip_call |= parameter_validation_vkDestroyBuffer(my_data->report_data, buffer, pAllocator); 2402 2403 if (!skip_call) { 2404 get_dispatch_table(pc_device_table_map, device)->DestroyBuffer(device, buffer, pAllocator); 2405 } 2406} 2407 2408VKAPI_ATTR VkResult VKAPI_CALL CreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo, 2409 const VkAllocationCallbacks *pAllocator, VkBufferView *pView) { 2410 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2411 bool skip_call = false; 2412 
layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2413 assert(my_data != NULL); 2414 2415 skip_call |= parameter_validation_vkCreateBufferView(my_data->report_data, pCreateInfo, pAllocator, pView); 2416 2417 if (!skip_call) { 2418 result = get_dispatch_table(pc_device_table_map, device)->CreateBufferView(device, pCreateInfo, pAllocator, pView); 2419 2420 validate_result(my_data->report_data, "vkCreateBufferView", result); 2421 } 2422 2423 return result; 2424} 2425 2426VKAPI_ATTR void VKAPI_CALL DestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks *pAllocator) { 2427 bool skip_call = false; 2428 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2429 assert(my_data != NULL); 2430 2431 skip_call |= parameter_validation_vkDestroyBufferView(my_data->report_data, bufferView, pAllocator); 2432 2433 if (!skip_call) { 2434 get_dispatch_table(pc_device_table_map, device)->DestroyBufferView(device, bufferView, pAllocator); 2435 } 2436} 2437 2438VKAPI_ATTR VkResult VKAPI_CALL CreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo, 2439 const VkAllocationCallbacks *pAllocator, VkImage *pImage) { 2440 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2441 bool skip_call = false; 2442 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2443 assert(device_data != nullptr); 2444 debug_report_data *report_data = device_data->report_data; 2445 2446 skip_call |= parameter_validation_vkCreateImage(report_data, pCreateInfo, pAllocator, pImage); 2447 2448 if (pCreateInfo != nullptr) { 2449 // Validation for parameters excluded from the generated validation code due to a 'noautovalidity' tag in vk.xml 2450 if (pCreateInfo->sharingMode == VK_SHARING_MODE_CONCURRENT) { 2451 // If sharingMode is VK_SHARING_MODE_CONCURRENT, queueFamilyIndexCount must be greater than 1 2452 if (pCreateInfo->queueFamilyIndexCount <= 1) { 2453 skip_call |= 2454 
log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 2455 INVALID_USAGE, LayerName, "vkCreateImage: if pCreateInfo->sharingMode is VK_SHARING_MODE_CONCURRENT, " 2456 "pCreateInfo->queueFamilyIndexCount must be greater than 1"); 2457 } 2458 2459 // If sharingMode is VK_SHARING_MODE_CONCURRENT, pQueueFamilyIndices must be a pointer to an array of 2460 // queueFamilyIndexCount uint32_t values 2461 if (pCreateInfo->pQueueFamilyIndices == nullptr) { 2462 skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, 2463 __LINE__, REQUIRED_PARAMETER, LayerName, 2464 "vkCreateImage: if pCreateInfo->sharingMode is VK_SHARING_MODE_CONCURRENT, " 2465 "pCreateInfo->pQueueFamilyIndices must be a pointer to an array of " 2466 "pCreateInfo->queueFamilyIndexCount uint32_t values"); 2467 } 2468 2469 skip_call |= validate_queue_family_indices(device_data, "vkCreateImage", "pCreateInfo->pQueueFamilyIndices", 2470 pCreateInfo->queueFamilyIndexCount, pCreateInfo->pQueueFamilyIndices); 2471 } 2472 2473 // width, height, and depth members of extent must be greater than 0 2474 skip_call |= ValidateGreaterThan(report_data, "vkCreateImage", "pCreateInfo->extent.width", pCreateInfo->extent.width, 0u); 2475 skip_call |= 2476 ValidateGreaterThan(report_data, "vkCreateImage", "pCreateInfo->extent.height", pCreateInfo->extent.height, 0u); 2477 skip_call |= ValidateGreaterThan(report_data, "vkCreateImage", "pCreateInfo->extent.depth", pCreateInfo->extent.depth, 0u); 2478 2479 // mipLevels must be greater than 0 2480 skip_call |= ValidateGreaterThan(report_data, "vkCreateImage", "pCreateInfo->mipLevels", pCreateInfo->mipLevels, 0u); 2481 2482 // arrayLayers must be greater than 0 2483 skip_call |= ValidateGreaterThan(report_data, "vkCreateImage", "pCreateInfo->arrayLayers", pCreateInfo->arrayLayers, 0u); 2484 2485 // If imageType is VK_IMAGE_TYPE_1D, both extent.height and extent.depth must be 1 
2486 if ((pCreateInfo->imageType == VK_IMAGE_TYPE_1D) && (pCreateInfo->extent.height != 1) && (pCreateInfo->extent.depth != 1)) { 2487 skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, 2488 LayerName, "vkCreateImage: if pCreateInfo->imageType is VK_IMAGE_TYPE_1D, both " 2489 "pCreateInfo->extent.height and pCreateInfo->extent.depth must be 1"); 2490 } 2491 2492 if (pCreateInfo->imageType == VK_IMAGE_TYPE_2D) { 2493 // If imageType is VK_IMAGE_TYPE_2D and flags contains VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT, extent.width and 2494 // extent.height must be equal 2495 if ((pCreateInfo->flags & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT) && 2496 (pCreateInfo->extent.width != pCreateInfo->extent.height)) { 2497 skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, 2498 LayerName, "vkCreateImage: if pCreateInfo->imageType is VK_IMAGE_TYPE_2D and " 2499 "pCreateInfo->flags contains VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT, " 2500 "pCreateInfo->extent.width and pCreateInfo->extent.height must be equal"); 2501 } 2502 2503 if (pCreateInfo->extent.depth != 1) { 2504 skip_call |= 2505 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, LayerName, 2506 "vkCreateImage: if pCreateInfo->imageType is VK_IMAGE_TYPE_2D, pCreateInfo->extent.depth must be 1"); 2507 } 2508 } 2509 2510 // mipLevels must be less than or equal to floor(log2(max(extent.width,extent.height,extent.depth)))+1 2511 uint32_t maxDim = std::max(std::max(pCreateInfo->extent.width, pCreateInfo->extent.height), pCreateInfo->extent.depth); 2512 if (pCreateInfo->mipLevels > (floor(log2(maxDim)) + 1)) { 2513 skip_call |= 2514 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, LayerName, 2515 "vkCreateImage: pCreateInfo->mipLevels must be less than or equal to " 2516 "floor(log2(max(pCreateInfo->extent.width, 
pCreateInfo->extent.height, pCreateInfo->extent.depth)))+1"); 2517 } 2518 2519 // If flags contains VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT or VK_IMAGE_CREATE_SPARSE_ALIASED_BIT, it must also contain 2520 // VK_IMAGE_CREATE_SPARSE_BINDING_BIT 2521 if (((pCreateInfo->flags & (VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT | VK_IMAGE_CREATE_SPARSE_ALIASED_BIT)) != 0) && 2522 ((pCreateInfo->flags & VK_IMAGE_CREATE_SPARSE_BINDING_BIT) != VK_IMAGE_CREATE_SPARSE_BINDING_BIT)) { 2523 skip_call |= 2524 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, LayerName, 2525 "vkCreateImage: pCreateInfo->flags contains VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT or " 2526 "VK_IMAGE_CREATE_SPARSE_ALIASED_BIT, it must also contain VK_IMAGE_CREATE_SPARSE_BINDING_BIT"); 2527 } 2528 } 2529 2530 if (!skip_call) { 2531 result = get_dispatch_table(pc_device_table_map, device)->CreateImage(device, pCreateInfo, pAllocator, pImage); 2532 2533 validate_result(report_data, "vkCreateImage", result); 2534 } 2535 2536 return result; 2537} 2538 2539VKAPI_ATTR void VKAPI_CALL DestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) { 2540 bool skip_call = false; 2541 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2542 assert(my_data != NULL); 2543 2544 skip_call |= parameter_validation_vkDestroyImage(my_data->report_data, image, pAllocator); 2545 2546 if (!skip_call) { 2547 get_dispatch_table(pc_device_table_map, device)->DestroyImage(device, image, pAllocator); 2548 } 2549} 2550 2551bool PreGetImageSubresourceLayout(VkDevice device, const VkImageSubresource *pSubresource) { 2552 if (pSubresource != nullptr) { 2553 if ((pSubresource->aspectMask & (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT | 2554 VK_IMAGE_ASPECT_METADATA_BIT)) == 0) { 2555 log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 2556 UNRECOGNIZED_VALUE, 
LayerName, 2557 "vkGetImageSubresourceLayout parameter, VkImageAspect pSubresource->aspectMask, is an unrecognized enumerator"); 2558 return false; 2559 } 2560 } 2561 2562 return true; 2563} 2564 2565VKAPI_ATTR void VKAPI_CALL GetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource *pSubresource, 2566 VkSubresourceLayout *pLayout) { 2567 bool skip_call = false; 2568 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2569 assert(my_data != NULL); 2570 2571 skip_call |= parameter_validation_vkGetImageSubresourceLayout(my_data->report_data, image, pSubresource, pLayout); 2572 2573 if (!skip_call) { 2574 PreGetImageSubresourceLayout(device, pSubresource); 2575 2576 get_dispatch_table(pc_device_table_map, device)->GetImageSubresourceLayout(device, image, pSubresource, pLayout); 2577 } 2578} 2579 2580VKAPI_ATTR VkResult VKAPI_CALL CreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo, 2581 const VkAllocationCallbacks *pAllocator, VkImageView *pView) { 2582 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2583 bool skip_call = false; 2584 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2585 assert(my_data != NULL); 2586 debug_report_data *report_data = my_data->report_data; 2587 2588 skip_call |= parameter_validation_vkCreateImageView(report_data, pCreateInfo, pAllocator, pView); 2589 2590 if (pCreateInfo != nullptr) { 2591 if ((pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_1D) || (pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_2D)) { 2592 if ((pCreateInfo->subresourceRange.layerCount != 1) && 2593 (pCreateInfo->subresourceRange.layerCount != VK_REMAINING_ARRAY_LAYERS)) { 2594 skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, 2595 LayerName, "vkCreateImageView: if pCreateInfo->viewType is VK_IMAGE_TYPE_%dD, " 2596 "pCreateInfo->subresourceRange.layerCount must be 1", 2597 ((pCreateInfo->viewType == 
VK_IMAGE_VIEW_TYPE_1D) ? 1 : 2)); 2598 } 2599 } else if ((pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_1D_ARRAY) || 2600 (pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_2D_ARRAY)) { 2601 if ((pCreateInfo->subresourceRange.layerCount < 1) && 2602 (pCreateInfo->subresourceRange.layerCount != VK_REMAINING_ARRAY_LAYERS)) { 2603 skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, 2604 LayerName, "vkCreateImageView: if pCreateInfo->viewType is VK_IMAGE_TYPE_%dD_ARRAY, " 2605 "pCreateInfo->subresourceRange.layerCount must be >= 1", 2606 ((pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_1D_ARRAY) ? 1 : 2)); 2607 } 2608 } else if (pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_CUBE) { 2609 if ((pCreateInfo->subresourceRange.layerCount != 6) && 2610 (pCreateInfo->subresourceRange.layerCount != VK_REMAINING_ARRAY_LAYERS)) { 2611 skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, 2612 LayerName, "vkCreateImageView: if pCreateInfo->viewType is VK_IMAGE_TYPE_CUBE, " 2613 "pCreateInfo->subresourceRange.layerCount must be 6"); 2614 } 2615 } else if (pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_CUBE_ARRAY) { 2616 if (((pCreateInfo->subresourceRange.layerCount == 0) || ((pCreateInfo->subresourceRange.layerCount % 6) != 0)) && 2617 (pCreateInfo->subresourceRange.layerCount != VK_REMAINING_ARRAY_LAYERS)) { 2618 skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, 2619 LayerName, "vkCreateImageView: if pCreateInfo->viewType is VK_IMAGE_TYPE_CUBE_ARRAY, " 2620 "pCreateInfo->subresourceRange.layerCount must be a multiple of 6"); 2621 } 2622 } else if (pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_3D) { 2623 if (pCreateInfo->subresourceRange.baseArrayLayer != 0) { 2624 skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, 2625 LayerName, "vkCreateImageView: if 
pCreateInfo->viewType is VK_IMAGE_TYPE_3D, " 2626 "pCreateInfo->subresourceRange.baseArrayLayer must be 0"); 2627 } 2628 2629 if ((pCreateInfo->subresourceRange.layerCount != 1) && 2630 (pCreateInfo->subresourceRange.layerCount != VK_REMAINING_ARRAY_LAYERS)) { 2631 skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, 2632 LayerName, "vkCreateImageView: if pCreateInfo->viewType is VK_IMAGE_TYPE_3D, " 2633 "pCreateInfo->subresourceRange.layerCount must be 1"); 2634 } 2635 } 2636 } 2637 2638 if (!skip_call) { 2639 result = get_dispatch_table(pc_device_table_map, device)->CreateImageView(device, pCreateInfo, pAllocator, pView); 2640 2641 validate_result(my_data->report_data, "vkCreateImageView", result); 2642 } 2643 2644 return result; 2645} 2646 2647VKAPI_ATTR void VKAPI_CALL DestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks *pAllocator) { 2648 bool skip_call = false; 2649 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2650 assert(my_data != NULL); 2651 2652 skip_call |= parameter_validation_vkDestroyImageView(my_data->report_data, imageView, pAllocator); 2653 2654 if (!skip_call) { 2655 get_dispatch_table(pc_device_table_map, device)->DestroyImageView(device, imageView, pAllocator); 2656 } 2657} 2658 2659VKAPI_ATTR VkResult VKAPI_CALL CreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo, 2660 const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule) { 2661 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2662 bool skip_call = false; 2663 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2664 assert(my_data != NULL); 2665 2666 skip_call |= parameter_validation_vkCreateShaderModule(my_data->report_data, pCreateInfo, pAllocator, pShaderModule); 2667 2668 if (!skip_call) { 2669 result = 2670 get_dispatch_table(pc_device_table_map, device)->CreateShaderModule(device, 
pCreateInfo, pAllocator, pShaderModule); 2671 2672 validate_result(my_data->report_data, "vkCreateShaderModule", result); 2673 } 2674 2675 return result; 2676} 2677 2678VKAPI_ATTR void VKAPI_CALL DestroyShaderModule(VkDevice device, VkShaderModule shaderModule, 2679 const VkAllocationCallbacks *pAllocator) { 2680 bool skip_call = false; 2681 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2682 assert(my_data != NULL); 2683 2684 skip_call |= parameter_validation_vkDestroyShaderModule(my_data->report_data, shaderModule, pAllocator); 2685 2686 if (!skip_call) { 2687 get_dispatch_table(pc_device_table_map, device)->DestroyShaderModule(device, shaderModule, pAllocator); 2688 } 2689} 2690 2691VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineCache(VkDevice device, const VkPipelineCacheCreateInfo *pCreateInfo, 2692 const VkAllocationCallbacks *pAllocator, VkPipelineCache *pPipelineCache) { 2693 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2694 bool skip_call = false; 2695 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2696 assert(my_data != NULL); 2697 2698 skip_call |= parameter_validation_vkCreatePipelineCache(my_data->report_data, pCreateInfo, pAllocator, pPipelineCache); 2699 2700 if (!skip_call) { 2701 result = 2702 get_dispatch_table(pc_device_table_map, device)->CreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache); 2703 2704 validate_result(my_data->report_data, "vkCreatePipelineCache", result); 2705 } 2706 2707 return result; 2708} 2709 2710VKAPI_ATTR void VKAPI_CALL DestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache, 2711 const VkAllocationCallbacks *pAllocator) { 2712 bool skip_call = false; 2713 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2714 assert(my_data != NULL); 2715 2716 skip_call |= parameter_validation_vkDestroyPipelineCache(my_data->report_data, pipelineCache, pAllocator); 2717 2718 if (!skip_call) { 2719 
get_dispatch_table(pc_device_table_map, device)->DestroyPipelineCache(device, pipelineCache, pAllocator); 2720 } 2721} 2722 2723VKAPI_ATTR VkResult VKAPI_CALL GetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t *pDataSize, 2724 void *pData) { 2725 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2726 bool skip_call = false; 2727 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2728 assert(my_data != NULL); 2729 2730 skip_call |= parameter_validation_vkGetPipelineCacheData(my_data->report_data, pipelineCache, pDataSize, pData); 2731 2732 if (!skip_call) { 2733 result = get_dispatch_table(pc_device_table_map, device)->GetPipelineCacheData(device, pipelineCache, pDataSize, pData); 2734 2735 validate_result(my_data->report_data, "vkGetPipelineCacheData", result); 2736 } 2737 2738 return result; 2739} 2740 2741VKAPI_ATTR VkResult VKAPI_CALL MergePipelineCaches(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, 2742 const VkPipelineCache *pSrcCaches) { 2743 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 2744 bool skip_call = false; 2745 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2746 assert(my_data != NULL); 2747 2748 skip_call |= parameter_validation_vkMergePipelineCaches(my_data->report_data, dstCache, srcCacheCount, pSrcCaches); 2749 2750 if (!skip_call) { 2751 result = get_dispatch_table(pc_device_table_map, device)->MergePipelineCaches(device, dstCache, srcCacheCount, pSrcCaches); 2752 2753 validate_result(my_data->report_data, "vkMergePipelineCaches", result); 2754 } 2755 2756 return result; 2757} 2758 2759bool PreCreateGraphicsPipelines(VkDevice device, const VkGraphicsPipelineCreateInfo *pCreateInfos) { 2760 layer_data *data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2761 2762 // TODO: Handle count 2763 if (pCreateInfos != nullptr) { 2764 if (pCreateInfos->flags | VK_PIPELINE_CREATE_DERIVATIVE_BIT) { 2765 if 
                (pCreateInfos->basePipelineIndex != -1) {
                // A derivative pipeline may name its parent by index OR by handle, never both
                if (pCreateInfos->basePipelineHandle != VK_NULL_HANDLE) {
                    log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                            INVALID_USAGE, LayerName,
                            "vkCreateGraphicsPipelines parameter, pCreateInfos->basePipelineHandle, must be VK_NULL_HANDLE if "
                            "pCreateInfos->flags "
                            "contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag and pCreateInfos->basePipelineIndex is not -1");
                    return false;
                }
            }

            // Mirror of the check above: when a handle names the parent, the index must be -1
            if (pCreateInfos->basePipelineHandle != VK_NULL_HANDLE) {
                if (pCreateInfos->basePipelineIndex != -1) {
                    log_msg(
                        mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                        INVALID_USAGE, LayerName,
                        "vkCreateGraphicsPipelines parameter, pCreateInfos->basePipelineIndex, must be -1 if pCreateInfos->flags "
                        "contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag and pCreateInfos->basePipelineHandle is not "
                        "VK_NULL_HANDLE");
                    return false;
                }
            }
        }

        if (pCreateInfos->pRasterizationState != nullptr) {
            // cullMode may only contain bits covered by VK_CULL_MODE_FRONT_AND_BACK (the full mask)
            if (pCreateInfos->pRasterizationState->cullMode & ~VK_CULL_MODE_FRONT_AND_BACK) {
                log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                        UNRECOGNIZED_VALUE, LayerName,
                        "vkCreateGraphicsPipelines parameter, VkCullMode pCreateInfos->pRasterizationState->cullMode, is an "
                        "unrecognized enumerator");
                return false;
            }

            // Non-FILL polygon modes require the fillModeNonSolid device feature
            if ((pCreateInfos->pRasterizationState->polygonMode != VK_POLYGON_MODE_FILL) &&
                (data->physical_device_features.fillModeNonSolid == false)) {
                log_msg(
                    mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                    DEVICE_FEATURE, LayerName,
                    "vkCreateGraphicsPipelines parameter, VkPolygonMode pCreateInfos->pRasterizationState->polygonMode cannot be "
                    "VK_POLYGON_MODE_POINT or VK_POLYGON_MODE_LINE if VkPhysicalDeviceFeatures->fillModeNonSolid is false.");
                return false;
            }
        }

        // NOTE(review): only pCreateInfos[0] is inspected here (see 'TODO: Handle count' above)
        int i = 0;
        for (size_t j = 0; j < pCreateInfos[i].stageCount; j++) {
            validate_string(data->report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pStages[j].pName",
                            pCreateInfos[i].pStages[j].pName);
        }
    }

    return true;
}

// Validates parameters (generated checks plus the 'noautovalidity' cases handled inline below),
// runs the manual pre-checks, then forwards to the next layer's CreateGraphicsPipelines.
VKAPI_ATTR VkResult VKAPI_CALL CreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
                                                       const VkGraphicsPipelineCreateInfo *pCreateInfos,
                                                       const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
    bool skip_call = false;
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(device_data != nullptr);
    debug_report_data *report_data = device_data->report_data;

    skip_call |= parameter_validation_vkCreateGraphicsPipelines(report_data, pipelineCache, createInfoCount, pCreateInfos,
                                                                pAllocator, pPipelines);

    if (pCreateInfos != nullptr) {
        for (uint32_t i = 0; i < createInfoCount; ++i) {
            // Validation for parameters excluded from the generated validation code due to a 'noautovalidity' tag in vk.xml
            if (pCreateInfos[i].pTessellationState == nullptr) {
                if (pCreateInfos[i].pStages != nullptr) {
                    // If pStages includes a tessellation control shader stage and a tessellation evaluation shader stage,
                    // pTessellationState must not be NULL
                    bool has_control = false;
                    bool has_eval = false;

                    for (uint32_t stage_index = 0; stage_index < pCreateInfos[i].stageCount; ++stage_index) {
                        if (pCreateInfos[i].pStages[stage_index].stage == VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT) {
                            has_control = true;
                        } else if (pCreateInfos[i].pStages[stage_index].stage == VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) {
                            has_eval = true;
                        }
                    }

                    if
                       (has_control && has_eval) {
                        skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                             __LINE__, REQUIRED_PARAMETER, LayerName,
                                             "vkCreateGraphicsPipelines: if pCreateInfos[%d].pStages includes a tessellation "
                                             "control shader stage and a tessellation evaluation shader stage, "
                                             "pCreateInfos[%d].pTessellationState must not be NULL",
                                             i, i);
                    }
                }
            } else {
                // pTessellationState is present: run the checks the generator skipped (pNext/flags/sType)
                skip_call |=
                    validate_struct_pnext(report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pTessellationState->pNext",
                                          NULL, pCreateInfos[i].pTessellationState->pNext, 0, NULL, GeneratedHeaderVersion);

                skip_call |=
                    validate_reserved_flags(report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pTessellationState->flags",
                                            pCreateInfos[i].pTessellationState->flags);

                if (pCreateInfos[i].pTessellationState->sType != VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO) {
                    skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                         __LINE__, INVALID_STRUCT_STYPE, LayerName,
                                         "vkCreateGraphicsPipelines: parameter pCreateInfos[%d].pTessellationState->sType must be "
                                         "VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO",
                                         i);
                }
            }

            if (pCreateInfos[i].pViewportState == nullptr) {
                // If the rasterizerDiscardEnable member of pRasterizationState is VK_FALSE, pViewportState must be a pointer to a
                // valid VkPipelineViewportStateCreateInfo structure
                if ((pCreateInfos[i].pRasterizationState != nullptr) &&
                    (pCreateInfos[i].pRasterizationState->rasterizerDiscardEnable == VK_FALSE)) {
                    skip_call |= log_msg(
                        report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                        REQUIRED_PARAMETER, LayerName,
                        "vkCreateGraphicsPipelines: if pCreateInfos[%d].pRasterizationState->rasterizerDiscardEnable is VK_FALSE, "
                        "pCreateInfos[%d].pViewportState must be a pointer to a valid VkPipelineViewportStateCreateInfo structure",
                        i, i);
                }
            } else {
                skip_call |=
                    validate_struct_pnext(report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pViewportState->pNext", NULL,
                                          pCreateInfos[i].pViewportState->pNext, 0, NULL, GeneratedHeaderVersion);

                skip_call |=
                    validate_reserved_flags(report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pViewportState->flags",
                                            pCreateInfos[i].pViewportState->flags);

                if (pCreateInfos[i].pViewportState->sType != VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO) {
                    skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                         __LINE__, INVALID_STRUCT_STYPE, LayerName,
                                         "vkCreateGraphicsPipelines: parameter pCreateInfos[%d].pViewportState->sType must be "
                                         "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO",
                                         i);
                }

                // NOTE(review): the viewport/scissor checks below only run when pDynamicState is
                // non-NULL, even though some of them do not depend on the dynamic states — confirm
                // against the spec before relying on coverage here.
                if (pCreateInfos[i].pDynamicState != nullptr) {
                    // Scan the dynamic-state list once for the two states that relax the static requirements
                    bool has_dynamic_viewport = false;
                    bool has_dynamic_scissor = false;

                    for (uint32_t state_index = 0; state_index < pCreateInfos[i].pDynamicState->dynamicStateCount; ++state_index) {
                        if (pCreateInfos[i].pDynamicState->pDynamicStates[state_index] == VK_DYNAMIC_STATE_VIEWPORT) {
                            has_dynamic_viewport = true;
                        } else if (pCreateInfos[i].pDynamicState->pDynamicStates[state_index] == VK_DYNAMIC_STATE_SCISSOR) {
                            has_dynamic_scissor = true;
                        }
                    }

                    // viewportCount must be greater than 0
                    // TODO: viewportCount must be 1 when multiple_viewport feature is not enabled
                    if (pCreateInfos[i].pViewportState->viewportCount == 0) {
                        skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                             __LINE__, REQUIRED_PARAMETER, LayerName,
                                             "vkCreateGraphicsPipelines: if pCreateInfos[%d].pDynamicState->pDynamicStates "
                                             "contains VK_DYNAMIC_STATE_VIEWPORT, pCreateInfos[%d].pViewportState->viewportCount "
                                             "must be greater than 0",
                                             i, i);
                    }

                    // If no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_VIEWPORT, the pViewports
                    // member of pViewportState must be a pointer to an array of pViewportState->viewportCount VkViewport structures
                    if (!has_dynamic_viewport && (pCreateInfos[i].pViewportState->pViewports == nullptr)) {
                        skip_call |=
                            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                    __LINE__, REQUIRED_PARAMETER, LayerName,
                                    "vkCreateGraphicsPipelines: if pCreateInfos[%d].pDynamicState->pDynamicStates contains "
                                    "VK_DYNAMIC_STATE_VIEWPORT, pCreateInfos[%d].pViewportState->pViewports must not be NULL",
                                    i, i);
                    }

                    // scissorCount must be greater than 0
                    // TODO: scissorCount must be 1 when multiple_viewport feature is not enabled
                    if (pCreateInfos[i].pViewportState->scissorCount == 0) {
                        skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                             __LINE__, REQUIRED_PARAMETER, LayerName,
                                             "vkCreateGraphicsPipelines: if pCreateInfos[%d].pDynamicState->pDynamicStates "
                                             "contains VK_DYNAMIC_STATE_SCISSOR, pCreateInfos[%d].pViewportState->scissorCount "
                                             "must be greater than 0",
                                             i, i);
                    }

                    // If no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_SCISSOR, the pScissors member
                    // of pViewportState must be a pointer to an array of pViewportState->scissorCount VkRect2D structures
                    if (!has_dynamic_scissor && (pCreateInfos[i].pViewportState->pScissors == nullptr)) {
                        skip_call |=
                            log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                    __LINE__, REQUIRED_PARAMETER, LayerName,
                                    "vkCreateGraphicsPipelines: if pCreateInfos[%d].pDynamicState->pDynamicStates contains "
                                    "VK_DYNAMIC_STATE_SCISSOR, pCreateInfos[%d].pViewportState->pScissors must not be NULL",
                                    i, i);
                    }
                }
            }

            if (pCreateInfos[i].pMultisampleState == nullptr) {
                // If the rasterizerDiscardEnable member of pRasterizationState is VK_FALSE, pMultisampleState must be a pointer to
                // a valid VkPipelineMultisampleStateCreateInfo structure
                if ((pCreateInfos[i].pRasterizationState != nullptr) &&
                    pCreateInfos[i].pRasterizationState->rasterizerDiscardEnable == VK_FALSE) {
                    skip_call |=
                        log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                                REQUIRED_PARAMETER, LayerName, "vkCreateGraphicsPipelines: if "
                                                               "pCreateInfos[%d].pRasterizationState->rasterizerDiscardEnable is "
                                                               "VK_FALSE, pCreateInfos[%d].pMultisampleState must not be NULL",
                                i, i);
                }
            } else {
                // pMultisampleState present: checks the generator skipped
                skip_call |=
                    validate_struct_pnext(report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pMultisampleState->pNext",
                                          NULL, pCreateInfos[i].pMultisampleState->pNext, 0, NULL, GeneratedHeaderVersion);

                skip_call |=
                    validate_reserved_flags(report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pMultisampleState->flags",
                                            pCreateInfos[i].pMultisampleState->flags);

                skip_call |= validate_bool32(report_data, "vkCreateGraphicsPipelines",
                                             "pCreateInfos[i].pMultisampleState->sampleShadingEnable",
                                             pCreateInfos[i].pMultisampleState->sampleShadingEnable);

                skip_call |= validate_array(
                    report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pMultisampleState->rasterizationSamples",
                    "pCreateInfos[i].pMultisampleState->pSampleMask", pCreateInfos[i].pMultisampleState->rasterizationSamples,
                    pCreateInfos[i].pMultisampleState->pSampleMask, true, false);

                skip_call |= validate_bool32(report_data, "vkCreateGraphicsPipelines",
                                             "pCreateInfos[i].pMultisampleState->alphaToCoverageEnable",
                                             pCreateInfos[i].pMultisampleState->alphaToCoverageEnable);

                skip_call |=
                    validate_bool32(report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pMultisampleState->alphaToOneEnable",
                                    pCreateInfos[i].pMultisampleState->alphaToOneEnable);

                if (pCreateInfos[i].pMultisampleState->sType != VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO) {
                    skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                         __LINE__, INVALID_STRUCT_STYPE, LayerName,
                                         "vkCreateGraphicsPipelines: parameter pCreateInfos[%d].pMultisampleState->sType must be "
                                         "VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO",
                                         i);
                }
            }

            // TODO: Conditional NULL check based on rasterizerDiscardEnable and subpass
            if (pCreateInfos[i].pDepthStencilState != nullptr) {
                skip_call |=
                    validate_struct_pnext(report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pDepthStencilState->pNext",
                                          NULL, pCreateInfos[i].pDepthStencilState->pNext, 0, NULL, GeneratedHeaderVersion);

                skip_call |=
                    validate_reserved_flags(report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pDepthStencilState->flags",
                                            pCreateInfos[i].pDepthStencilState->flags);

                skip_call |=
                    validate_bool32(report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pDepthStencilState->depthTestEnable",
                                    pCreateInfos[i].pDepthStencilState->depthTestEnable);

                skip_call |= validate_bool32(report_data, "vkCreateGraphicsPipelines",
                                             "pCreateInfos[i].pDepthStencilState->depthWriteEnable",
                                             pCreateInfos[i].pDepthStencilState->depthWriteEnable);

                skip_call |= validate_ranged_enum(
                    report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pDepthStencilState->depthCompareOp", "VkCompareOp",
                    VK_COMPARE_OP_BEGIN_RANGE, VK_COMPARE_OP_END_RANGE, pCreateInfos[i].pDepthStencilState->depthCompareOp);

                skip_call |= validate_bool32(report_data, "vkCreateGraphicsPipelines",
                                             "pCreateInfos[i].pDepthStencilState->depthBoundsTestEnable",
                                             pCreateInfos[i].pDepthStencilState->depthBoundsTestEnable);

                skip_call |= validate_bool32(report_data, "vkCreateGraphicsPipelines",
                                             "pCreateInfos[i].pDepthStencilState->stencilTestEnable",
                                             pCreateInfos[i].pDepthStencilState->stencilTestEnable);

                // front and back VkStencilOpState members, field by field
                skip_call |= validate_ranged_enum(
                    report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pDepthStencilState->front.failOp", "VkStencilOp",
                    VK_STENCIL_OP_BEGIN_RANGE, VK_STENCIL_OP_END_RANGE, pCreateInfos[i].pDepthStencilState->front.failOp);

                skip_call |= validate_ranged_enum(
                    report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pDepthStencilState->front.passOp", "VkStencilOp",
                    VK_STENCIL_OP_BEGIN_RANGE, VK_STENCIL_OP_END_RANGE, pCreateInfos[i].pDepthStencilState->front.passOp);

                skip_call |= validate_ranged_enum(report_data, "vkCreateGraphicsPipelines",
                                                  "pCreateInfos[i].pDepthStencilState->front.depthFailOp", "VkStencilOp",
                                                  VK_STENCIL_OP_BEGIN_RANGE, VK_STENCIL_OP_END_RANGE,
                                                  pCreateInfos[i].pDepthStencilState->front.depthFailOp);

                skip_call |= validate_ranged_enum(
                    report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pDepthStencilState->front.compareOp", "VkCompareOp",
                    VK_COMPARE_OP_BEGIN_RANGE, VK_COMPARE_OP_END_RANGE, pCreateInfos[i].pDepthStencilState->front.compareOp);

                skip_call |= validate_ranged_enum(
                    report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pDepthStencilState->back.failOp", "VkStencilOp",
                    VK_STENCIL_OP_BEGIN_RANGE, VK_STENCIL_OP_END_RANGE, pCreateInfos[i].pDepthStencilState->back.failOp);

                skip_call |= validate_ranged_enum(
                    report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pDepthStencilState->back.passOp", "VkStencilOp",
                    VK_STENCIL_OP_BEGIN_RANGE, VK_STENCIL_OP_END_RANGE, pCreateInfos[i].pDepthStencilState->back.passOp);

                skip_call |= validate_ranged_enum(
                    report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pDepthStencilState->back.depthFailOp", "VkStencilOp",
                    VK_STENCIL_OP_BEGIN_RANGE, VK_STENCIL_OP_END_RANGE, pCreateInfos[i].pDepthStencilState->back.depthFailOp);

                skip_call |= validate_ranged_enum(
                    report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pDepthStencilState->back.compareOp", "VkCompareOp",
                    VK_COMPARE_OP_BEGIN_RANGE, VK_COMPARE_OP_END_RANGE, pCreateInfos[i].pDepthStencilState->back.compareOp);

                if (pCreateInfos[i].pDepthStencilState->sType != VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO) {
                    skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                         __LINE__, INVALID_STRUCT_STYPE, LayerName,
                                         "vkCreateGraphicsPipelines: parameter pCreateInfos[%d].pDepthStencilState->sType must be "
                                         "VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO",
                                         i);
                }
            }

            // TODO: Conditional NULL check based on rasterizerDiscardEnable and subpass
            if (pCreateInfos[i].pColorBlendState != nullptr) {
                skip_call |=
                    validate_struct_pnext(report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pColorBlendState->pNext", NULL,
                                          pCreateInfos[i].pColorBlendState->pNext, 0, NULL, GeneratedHeaderVersion);

                skip_call |=
                    validate_reserved_flags(report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pColorBlendState->flags",
                                            pCreateInfos[i].pColorBlendState->flags);

                skip_call |=
                    validate_bool32(report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pColorBlendState->logicOpEnable",
                                    pCreateInfos[i].pColorBlendState->logicOpEnable);

                skip_call |= validate_array(
                    report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pColorBlendState->attachmentCount",
                    "pCreateInfos[i].pColorBlendState->pAttachments", pCreateInfos[i].pColorBlendState->attachmentCount,
                    pCreateInfos[i].pColorBlendState->pAttachments, false, true);

                if (pCreateInfos[i].pColorBlendState->pAttachments != NULL) {
                    // Per-attachment blend state: every enum/flag field must be in range
                    for (uint32_t attachmentIndex = 0; attachmentIndex < pCreateInfos[i].pColorBlendState->attachmentCount;
                         ++attachmentIndex) {
                        skip_call |= validate_bool32(report_data, "vkCreateGraphicsPipelines",
                                                     "pCreateInfos[i].pColorBlendState->pAttachments[i].blendEnable",
                                                     pCreateInfos[i].pColorBlendState->pAttachments[attachmentIndex].blendEnable);

                        skip_call |= validate_ranged_enum(
                            report_data, "vkCreateGraphicsPipelines",
                            "pCreateInfos[i].pColorBlendState->pAttachments[i].srcColorBlendFactor", "VkBlendFactor",
                            VK_BLEND_FACTOR_BEGIN_RANGE, VK_BLEND_FACTOR_END_RANGE,
                            pCreateInfos[i].pColorBlendState->pAttachments[attachmentIndex].srcColorBlendFactor);

                        skip_call |= validate_ranged_enum(
                            report_data, "vkCreateGraphicsPipelines",
                            "pCreateInfos[i].pColorBlendState->pAttachments[i].dstColorBlendFactor", "VkBlendFactor",
                            VK_BLEND_FACTOR_BEGIN_RANGE, VK_BLEND_FACTOR_END_RANGE,
                            pCreateInfos[i].pColorBlendState->pAttachments[attachmentIndex].dstColorBlendFactor);

                        skip_call |= validate_ranged_enum(
                            report_data, "vkCreateGraphicsPipelines",
                            "pCreateInfos[i].pColorBlendState->pAttachments[i].colorBlendOp", "VkBlendOp", VK_BLEND_OP_BEGIN_RANGE,
                            VK_BLEND_OP_END_RANGE, pCreateInfos[i].pColorBlendState->pAttachments[attachmentIndex].colorBlendOp);

                        skip_call |= validate_ranged_enum(
                            report_data, "vkCreateGraphicsPipelines",
                            "pCreateInfos[i].pColorBlendState->pAttachments[i].srcAlphaBlendFactor", "VkBlendFactor",
                            VK_BLEND_FACTOR_BEGIN_RANGE, VK_BLEND_FACTOR_END_RANGE,
                            pCreateInfos[i].pColorBlendState->pAttachments[attachmentIndex].srcAlphaBlendFactor);

                        skip_call |= validate_ranged_enum(
                            report_data, "vkCreateGraphicsPipelines",
                            "pCreateInfos[i].pColorBlendState->pAttachments[i].dstAlphaBlendFactor", "VkBlendFactor",
                            VK_BLEND_FACTOR_BEGIN_RANGE, VK_BLEND_FACTOR_END_RANGE,
                            pCreateInfos[i].pColorBlendState->pAttachments[attachmentIndex].dstAlphaBlendFactor);

                        skip_call |= validate_ranged_enum(
                            report_data, "vkCreateGraphicsPipelines",
                            "pCreateInfos[i].pColorBlendState->pAttachments[i].alphaBlendOp", "VkBlendOp", VK_BLEND_OP_BEGIN_RANGE,
                            VK_BLEND_OP_END_RANGE, pCreateInfos[i].pColorBlendState->pAttachments[attachmentIndex].alphaBlendOp);

                        skip_call |=
                            validate_flags(report_data, "vkCreateGraphicsPipelines",
                                           "pCreateInfos[i].pColorBlendState->pAttachments[i].colorWriteMask",
                                           "VkColorComponentFlagBits", AllVkColorComponentFlagBits,
                                           pCreateInfos[i].pColorBlendState->pAttachments[attachmentIndex].colorWriteMask, false);
                    }
                }

                if (pCreateInfos[i].pColorBlendState->sType != VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO) {
                    skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                         __LINE__, INVALID_STRUCT_STYPE, LayerName,
                                         "vkCreateGraphicsPipelines: parameter pCreateInfos[%d].pColorBlendState->sType must be "
                                         "VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO",
                                         i);
                }

                // If logicOpEnable is VK_TRUE, logicOp must be a valid VkLogicOp value
                if (pCreateInfos[i].pColorBlendState->logicOpEnable == VK_TRUE) {
                    skip_call |= validate_ranged_enum(
                        report_data, "vkCreateGraphicsPipelines", "pCreateInfos[i].pColorBlendState->logicOp", "VkLogicOp",
                        VK_LOGIC_OP_BEGIN_RANGE, VK_LOGIC_OP_END_RANGE, pCreateInfos[i].pColorBlendState->logicOp);
                }
            }
        }
    }

    if (!skip_call) {
        PreCreateGraphicsPipelines(device, pCreateInfos);

        result = get_dispatch_table(pc_device_table_map, device)
                     ->CreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);

        validate_result(report_data,
"vkCreateGraphicsPipelines", result); 3175 } 3176 3177 return result; 3178} 3179 3180bool PreCreateComputePipelines(VkDevice device, const VkComputePipelineCreateInfo *pCreateInfos) { 3181 layer_data *data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 3182 3183 if (pCreateInfos != nullptr) { 3184 // TODO: Handle count! 3185 int i = 0; 3186 validate_string(data->report_data, "vkCreateComputePipelines", "pCreateInfos[i].stage.pName", pCreateInfos[i].stage.pName); 3187 } 3188 3189 return true; 3190} 3191 3192VKAPI_ATTR VkResult VKAPI_CALL CreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, 3193 const VkComputePipelineCreateInfo *pCreateInfos, 3194 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) { 3195 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 3196 bool skip_call = false; 3197 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 3198 assert(my_data != NULL); 3199 3200 skip_call |= parameter_validation_vkCreateComputePipelines(my_data->report_data, pipelineCache, createInfoCount, pCreateInfos, 3201 pAllocator, pPipelines); 3202 3203 if (!skip_call) { 3204 PreCreateComputePipelines(device, pCreateInfos); 3205 3206 result = get_dispatch_table(pc_device_table_map, device) 3207 ->CreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines); 3208 3209 validate_result(my_data->report_data, "vkCreateComputePipelines", result); 3210 } 3211 3212 return result; 3213} 3214 3215VKAPI_ATTR void VKAPI_CALL DestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks *pAllocator) { 3216 bool skip_call = false; 3217 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 3218 assert(my_data != NULL); 3219 3220 skip_call |= parameter_validation_vkDestroyPipeline(my_data->report_data, pipeline, pAllocator); 3221 3222 if (!skip_call) { 3223 get_dispatch_table(pc_device_table_map, 
device)->DestroyPipeline(device, pipeline, pAllocator); 3224 } 3225} 3226 3227VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo, 3228 const VkAllocationCallbacks *pAllocator, VkPipelineLayout *pPipelineLayout) { 3229 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 3230 bool skip_call = false; 3231 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 3232 assert(my_data != NULL); 3233 3234 skip_call |= parameter_validation_vkCreatePipelineLayout(my_data->report_data, pCreateInfo, pAllocator, pPipelineLayout); 3235 3236 if (!skip_call) { 3237 result = 3238 get_dispatch_table(pc_device_table_map, device)->CreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout); 3239 3240 validate_result(my_data->report_data, "vkCreatePipelineLayout", result); 3241 } 3242 3243 return result; 3244} 3245 3246VKAPI_ATTR void VKAPI_CALL DestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, 3247 const VkAllocationCallbacks *pAllocator) { 3248 bool skip_call = false; 3249 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 3250 assert(my_data != NULL); 3251 3252 skip_call |= parameter_validation_vkDestroyPipelineLayout(my_data->report_data, pipelineLayout, pAllocator); 3253 3254 if (!skip_call) { 3255 get_dispatch_table(pc_device_table_map, device)->DestroyPipelineLayout(device, pipelineLayout, pAllocator); 3256 } 3257} 3258 3259VKAPI_ATTR VkResult VKAPI_CALL CreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo, 3260 const VkAllocationCallbacks *pAllocator, VkSampler *pSampler) { 3261 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 3262 bool skip_call = false; 3263 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 3264 assert(device_data != NULL); 3265 debug_report_data *report_data = device_data->report_data; 3266 3267 skip_call |= parameter_validation_vkCreateSampler(report_data, 
pCreateInfo, pAllocator, pSampler);

    // Validation for parameters excluded from the generated validation code due to a 'noautovalidity' tag in vk.xml
    if (pCreateInfo != nullptr) {
        // If compareEnable is VK_TRUE, compareOp must be a valid VkCompareOp value
        if (pCreateInfo->compareEnable == VK_TRUE) {
            skip_call |= validate_ranged_enum(report_data, "vkCreateSampler", "pCreateInfo->compareOp", "VkCompareOp",
                                              VK_COMPARE_OP_BEGIN_RANGE, VK_COMPARE_OP_END_RANGE, pCreateInfo->compareOp);
        }

        // If any of addressModeU, addressModeV or addressModeW are VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER, borderColor must be a
        // valid VkBorderColor value
        if ((pCreateInfo->addressModeU == VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER) ||
            (pCreateInfo->addressModeV == VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER) ||
            (pCreateInfo->addressModeW == VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER)) {
            skip_call |= validate_ranged_enum(report_data, "vkCreateSampler", "pCreateInfo->borderColor", "VkBorderColor",
                                              VK_BORDER_COLOR_BEGIN_RANGE, VK_BORDER_COLOR_END_RANGE, pCreateInfo->borderColor);
        }
    }

    // Forward to the next layer only when no validation message fired.
    if (!skip_call) {
        result = get_dispatch_table(pc_device_table_map, device)->CreateSampler(device, pCreateInfo, pAllocator, pSampler);

        validate_result(report_data, "vkCreateSampler", result);
    }

    return result;
}

// Layer intercept for vkDestroySampler: run generated handle validation, then forward.
VKAPI_ATTR void VKAPI_CALL DestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks *pAllocator) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkDestroySampler(my_data->report_data, sampler, pAllocator);

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, device)->DestroySampler(device, sampler, pAllocator);
    }
}

// Layer intercept for vkCreateDescriptorSetLayout: generated validation plus manual checks for
// the 'noautovalidity' members of VkDescriptorSetLayoutBinding (pImmutableSamplers, stageFlags).
VKAPI_ATTR VkResult VKAPI_CALL
CreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
                          const VkAllocationCallbacks *pAllocator,
                          VkDescriptorSetLayout *pSetLayout) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
    bool skip_call = false;
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(device_data != nullptr);
    debug_report_data *report_data = device_data->report_data;

    skip_call |= parameter_validation_vkCreateDescriptorSetLayout(report_data, pCreateInfo, pAllocator, pSetLayout);

    // Validation for parameters excluded from the generated validation code due to a 'noautovalidity' tag in vk.xml
    if ((pCreateInfo != nullptr) && (pCreateInfo->pBindings != nullptr)) {
        for (uint32_t i = 0; i < pCreateInfo->bindingCount; ++i) {
            // Both checks below only apply when the binding actually contains descriptors.
            if (pCreateInfo->pBindings[i].descriptorCount != 0) {
                // If descriptorType is VK_DESCRIPTOR_TYPE_SAMPLER or VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, and descriptorCount
                // is not 0 and pImmutableSamplers is not NULL, pImmutableSamplers must be a pointer to an array of descriptorCount
                // valid VkSampler handles
                if (((pCreateInfo->pBindings[i].descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) ||
                     (pCreateInfo->pBindings[i].descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)) &&
                    (pCreateInfo->pBindings[i].pImmutableSamplers != nullptr)) {
                    for (uint32_t descriptor_index = 0; descriptor_index < pCreateInfo->pBindings[i].descriptorCount;
                         ++descriptor_index) {
                        if (pCreateInfo->pBindings[i].pImmutableSamplers[descriptor_index] == VK_NULL_HANDLE) {
                            skip_call |=
                                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                        __LINE__, REQUIRED_PARAMETER, LayerName, "vkCreateDescriptorSetLayout: required parameter "
                                        "pCreateInfo->pBindings[%d].pImmutableSamplers[%d]"
                                        " specified as VK_NULL_HANDLE",
                                        i, descriptor_index);
                        }
                    }
                }

                // If descriptorCount is not 0, stageFlags must be a valid combination of VkShaderStageFlagBits values
                if ((pCreateInfo->pBindings[i].stageFlags != 0) &&
                    ((pCreateInfo->pBindings[i].stageFlags & (~AllVkShaderStageFlagBits)) != 0)) {
                    skip_call |=
                        log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                                UNRECOGNIZED_VALUE, LayerName,
                                "vkCreateDescriptorSetLayout: if pCreateInfo->pBindings[%d].descriptorCount is not 0, "
                                "pCreateInfo->pBindings[%d].stageFlags must be a valid combination of VkShaderStageFlagBits values",
                                i, i);
                }
            }
        }
    }

    if (!skip_call) {
        result =
            get_dispatch_table(pc_device_table_map, device)->CreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout);

        validate_result(report_data, "vkCreateDescriptorSetLayout", result);
    }

    return result;
}

// Layer intercept for vkDestroyDescriptorSetLayout: validate handles, then forward.
VKAPI_ATTR void VKAPI_CALL DestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
                                                      const VkAllocationCallbacks *pAllocator) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkDestroyDescriptorSetLayout(my_data->report_data, descriptorSetLayout, pAllocator);

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, device)->DestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator);
    }
}

// Layer intercept for vkCreateDescriptorPool: generated validation, then forward and check result.
VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
                                                    const VkAllocationCallbacks *pAllocator, VkDescriptorPool *pDescriptorPool) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(my_data != NULL);

    skip_call |=
parameter_validation_vkCreateDescriptorPool(my_data->report_data, pCreateInfo, pAllocator, pDescriptorPool);

    /* TODOVV: How do we validate maxSets? Probably belongs in the limits layer? */

    if (!skip_call) {
        result =
            get_dispatch_table(pc_device_table_map, device)->CreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);

        validate_result(my_data->report_data, "vkCreateDescriptorPool", result);
    }

    return result;
}

// Layer intercept for vkDestroyDescriptorPool: validate handles, then forward down the chain.
VKAPI_ATTR void VKAPI_CALL DestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
                                                 const VkAllocationCallbacks *pAllocator) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkDestroyDescriptorPool(my_data->report_data, descriptorPool, pAllocator);

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, device)->DestroyDescriptorPool(device, descriptorPool, pAllocator);
    }
}

// Layer intercept for vkResetDescriptorPool: generated validation, then forward and check result.
VKAPI_ATTR VkResult VKAPI_CALL ResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
                                                   VkDescriptorPoolResetFlags flags) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkResetDescriptorPool(my_data->report_data, descriptorPool, flags);

    if (!skip_call) {
        result = get_dispatch_table(pc_device_table_map, device)->ResetDescriptorPool(device, descriptorPool, flags);

        validate_result(my_data->report_data, "vkResetDescriptorPool", result);
    }

    return result;
}

// Layer intercept for vkAllocateDescriptorSets: generated validation, then forward and check result.
VKAPI_ATTR VkResult VKAPI_CALL AllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
                                                      VkDescriptorSet *pDescriptorSets) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkAllocateDescriptorSets(my_data->report_data, pAllocateInfo, pDescriptorSets);

    if (!skip_call) {
        result = get_dispatch_table(pc_device_table_map, device)->AllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);

        validate_result(my_data->report_data, "vkAllocateDescriptorSets", result);
    }

    return result;
}

// Layer intercept for vkFreeDescriptorSets. pDescriptorSets is validated manually because its
// elements may legitimately be VK_NULL_HANDLE, which the generated handle check would reject.
VKAPI_ATTR VkResult VKAPI_CALL FreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount,
                                                  const VkDescriptorSet *pDescriptorSets) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
    bool skip_call = false;
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(device_data != nullptr);
    debug_report_data *report_data = device_data->report_data;

    skip_call |= parameter_validation_vkFreeDescriptorSets(report_data, descriptorPool, descriptorSetCount, pDescriptorSets);

    // Validation for parameters excluded from the generated validation code due to a 'noautovalidity' tag in vk.xml
    // This is an array of handles, where the elements are allowed to be VK_NULL_HANDLE, and does not require any validation beyond
    // validate_array()
    skip_call |= validate_array(report_data, "vkFreeDescriptorSets", "descriptorSetCount", "pDescriptorSets", descriptorSetCount,
                                pDescriptorSets, true, true);

    if (!skip_call) {
        result = get_dispatch_table(pc_device_table_map, device)
                     ->FreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets);

        validate_result(report_data, "vkFreeDescriptorSets", result);
    }

    return result;
}

// Layer intercept for vkUpdateDescriptorSets: generated validation plus extensive manual checks
// of the per-descriptor-type union members of VkWriteDescriptorSet (pImageInfo / pBufferInfo /
// pTexelBufferView) and of buffer-offset alignment against the device limits.
VKAPI_ATTR void VKAPI_CALL UpdateDescriptorSets(VkDevice device, uint32_t
descriptorWriteCount,
                                                const VkWriteDescriptorSet *pDescriptorWrites, uint32_t descriptorCopyCount,
                                                const VkCopyDescriptorSet *pDescriptorCopies) {
    bool skip_call = false;
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(device_data != NULL);
    debug_report_data *report_data = device_data->report_data;

    skip_call |= parameter_validation_vkUpdateDescriptorSets(report_data, descriptorWriteCount, pDescriptorWrites,
                                                             descriptorCopyCount, pDescriptorCopies);

    // Validation for parameters excluded from the generated validation code due to a 'noautovalidity' tag in vk.xml
    if (pDescriptorWrites != NULL) {
        for (uint32_t i = 0; i < descriptorWriteCount; ++i) {
            // descriptorCount must be greater than 0
            if (pDescriptorWrites[i].descriptorCount == 0) {
                skip_call |=
                    log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                            REQUIRED_PARAMETER, LayerName,
                            "vkUpdateDescriptorSets: parameter pDescriptorWrites[%d].descriptorCount must be greater than 0", i);
            }

            // Which union member of VkWriteDescriptorSet must be valid depends on descriptorType.
            if ((pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) ||
                (pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) ||
                (pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) ||
                (pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) ||
                (pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)) {
                // If descriptorType is VK_DESCRIPTOR_TYPE_SAMPLER, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
                // VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE or VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT,
                // pImageInfo must be a pointer to an array of descriptorCount valid VkDescriptorImageInfo structures
                if (pDescriptorWrites[i].pImageInfo == nullptr) {
                    skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                         __LINE__, REQUIRED_PARAMETER, LayerName,
                                         "vkUpdateDescriptorSets: if pDescriptorWrites[%d].descriptorType is "
                                         "VK_DESCRIPTOR_TYPE_SAMPLER, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, "
                                         "VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE or "
                                         "VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, pDescriptorWrites[%d].pImageInfo must not be NULL",
                                         i, i);
                } else if (pDescriptorWrites[i].descriptorType != VK_DESCRIPTOR_TYPE_SAMPLER) {
                    // If descriptorType is VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
                    // VK_DESCRIPTOR_TYPE_STORAGE_IMAGE or VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, the imageView and imageLayout
                    // members of any given element of pImageInfo must be a valid VkImageView and VkImageLayout, respectively
                    // NOTE(review): the reported parameter names use a literal "[i]" rather than the
                    // actual loop indices — the message text does not identify which element failed.
                    for (uint32_t descriptor_index = 0; descriptor_index < pDescriptorWrites[i].descriptorCount;
                         ++descriptor_index) {
                        skip_call |= validate_required_handle(report_data, "vkUpdateDescriptorSets",
                                                              "pDescriptorWrites[i].pImageInfo[i].imageView",
                                                              pDescriptorWrites[i].pImageInfo[descriptor_index].imageView);
                        skip_call |= validate_ranged_enum(report_data, "vkUpdateDescriptorSets",
                                                          "pDescriptorWrites[i].pImageInfo[i].imageLayout", "VkImageLayout",
                                                          VK_IMAGE_LAYOUT_BEGIN_RANGE, VK_IMAGE_LAYOUT_END_RANGE,
                                                          pDescriptorWrites[i].pImageInfo[descriptor_index].imageLayout);
                    }
                }
            } else if ((pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
                       (pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER) ||
                       (pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) ||
                       (pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)) {
                // If descriptorType is VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
                // VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC or VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, pBufferInfo must be a
                // pointer to an array of descriptorCount valid VkDescriptorBufferInfo structures
                if (pDescriptorWrites[i].pBufferInfo == nullptr) {
                    skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                         __LINE__, REQUIRED_PARAMETER, LayerName,
                                         "vkUpdateDescriptorSets: if pDescriptorWrites[%d].descriptorType is "
                                         "VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, "
                                         "VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC or VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, "
                                         "pDescriptorWrites[%d].pBufferInfo must not be NULL",
                                         i, i);
                } else {
                    for (uint32_t descriptorIndex = 0; descriptorIndex < pDescriptorWrites[i].descriptorCount; ++descriptorIndex) {
                        skip_call |= validate_required_handle(report_data, "vkUpdateDescriptorSets",
                                                              "pDescriptorWrites[i].pBufferInfo[i].buffer",
                                                              pDescriptorWrites[i].pBufferInfo[descriptorIndex].buffer);
                    }
                }
            } else if ((pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER) ||
                       (pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER)) {
                // If descriptorType is VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER or VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
                // pTexelBufferView must be a pointer to an array of descriptorCount valid VkBufferView handles
                if (pDescriptorWrites[i].pTexelBufferView == nullptr) {
                    skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                                         __LINE__, REQUIRED_PARAMETER, LayerName,
                                         "vkUpdateDescriptorSets: if pDescriptorWrites[%d].descriptorType is "
                                         "VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER or VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, "
                                         "pDescriptorWrites[%d].pTexelBufferView must not be NULL",
                                         i, i);
                } else {
                    for (uint32_t descriptor_index = 0; descriptor_index < pDescriptorWrites[i].descriptorCount;
                         ++descriptor_index) {
                        skip_call |= validate_required_handle(report_data, "vkUpdateDescriptorSets",
                                                              "pDescriptorWrites[i].pTexelBufferView[i]",
                                                              pDescriptorWrites[i].pTexelBufferView[descriptor_index]);
                    }
                }
            }

            // Buffer-offset alignment checks against the cached VkPhysicalDeviceLimits.
            if ((pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
                (pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC)) {
                VkDeviceSize uniformAlignment = device_data->device_limits.minUniformBufferOffsetAlignment;
                for (uint32_t j = 0; j < pDescriptorWrites[i].descriptorCount; j++) {
                    if (pDescriptorWrites[i].pBufferInfo != NULL) {
                        if (vk_safe_modulo(pDescriptorWrites[i].pBufferInfo[j].offset, uniformAlignment) != 0) {
                            skip_call |=
                                log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
                                        VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0, __LINE__, DEVICE_LIMIT, LayerName,
                                        "vkUpdateDescriptorSets(): pDescriptorWrites[%d].pBufferInfo[%d].offset (0x%" PRIxLEAST64
                                        ") must be a multiple of device limit minUniformBufferOffsetAlignment 0x%" PRIxLEAST64,
                                        i, j, pDescriptorWrites[i].pBufferInfo[j].offset, uniformAlignment);
                        }
                    }
                }
            } else if ((pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER) ||
                       (pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)) {
                VkDeviceSize storageAlignment = device_data->device_limits.minStorageBufferOffsetAlignment;
                for (uint32_t j = 0; j < pDescriptorWrites[i].descriptorCount; j++) {
                    if (pDescriptorWrites[i].pBufferInfo != NULL) {
                        if (vk_safe_modulo(pDescriptorWrites[i].pBufferInfo[j].offset, storageAlignment) != 0) {
                            skip_call |=
                                log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
                                        VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0, __LINE__, DEVICE_LIMIT, LayerName,
                                        "vkUpdateDescriptorSets(): pDescriptorWrites[%d].pBufferInfo[%d].offset (0x%" PRIxLEAST64
                                        ") must be a multiple of device limit minStorageBufferOffsetAlignment 0x%" PRIxLEAST64,
                                        i, j, pDescriptorWrites[i].pBufferInfo[j].offset, storageAlignment);
                        }
                    }
                }
            }
        }
    }

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, device)
            ->UpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
    }
}

// Layer intercept for vkCreateFramebuffer: generated validation, then forward and check result.
VKAPI_ATTR VkResult VKAPI_CALL CreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
                                                 const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkCreateFramebuffer(my_data->report_data, pCreateInfo, pAllocator, pFramebuffer);

    if (!skip_call) {
        result = get_dispatch_table(pc_device_table_map, device)->CreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer);

        validate_result(my_data->report_data, "vkCreateFramebuffer", result);
    }

    return result;
}

// Layer intercept for vkDestroyFramebuffer: validate handles, then forward down the chain.
VKAPI_ATTR void VKAPI_CALL DestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks *pAllocator) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkDestroyFramebuffer(my_data->report_data, framebuffer, pAllocator);

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, device)->DestroyFramebuffer(device, framebuffer, pAllocator);
    }
}

// Manual check that no subpass requests more color attachments than the device limit allows.
// NOTE(review): pCreateInfo is dereferenced unconditionally — callers must not pass NULL.
bool PreCreateRenderPass(layer_data *dev_data, const VkRenderPassCreateInfo *pCreateInfo) {
    bool skip_call = false;
    uint32_t max_color_attachments =
dev_data->device_limits.maxColorAttachments; 3644 3645 for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) { 3646 if (pCreateInfo->pSubpasses[i].colorAttachmentCount > max_color_attachments) { 3647 skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, 3648 __LINE__, DEVICE_LIMIT, "DL", "Cannot create a render pass with %d color attachments. Max is %d.", 3649 pCreateInfo->pSubpasses[i].colorAttachmentCount, max_color_attachments); 3650 } 3651 } 3652 return skip_call; 3653} 3654 3655VKAPI_ATTR VkResult VKAPI_CALL CreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo, 3656 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass) { 3657 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 3658 bool skip_call = false; 3659 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 3660 assert(my_data != NULL); 3661 3662 skip_call |= parameter_validation_vkCreateRenderPass(my_data->report_data, pCreateInfo, pAllocator, pRenderPass); 3663 skip_call |= PreCreateRenderPass(my_data, pCreateInfo); 3664 3665 if (!skip_call) { 3666 result = get_dispatch_table(pc_device_table_map, device)->CreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass); 3667 3668 validate_result(my_data->report_data, "vkCreateRenderPass", result); 3669 } 3670 3671 return result; 3672} 3673 3674VKAPI_ATTR void VKAPI_CALL DestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks *pAllocator) { 3675 bool skip_call = false; 3676 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 3677 assert(my_data != NULL); 3678 3679 skip_call |= parameter_validation_vkDestroyRenderPass(my_data->report_data, renderPass, pAllocator); 3680 3681 if (!skip_call) { 3682 get_dispatch_table(pc_device_table_map, device)->DestroyRenderPass(device, renderPass, pAllocator); 3683 } 3684} 3685 3686VKAPI_ATTR void VKAPI_CALL 
GetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D *pGranularity) { 3687 bool skip_call = false; 3688 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 3689 assert(my_data != NULL); 3690 3691 skip_call |= parameter_validation_vkGetRenderAreaGranularity(my_data->report_data, renderPass, pGranularity); 3692 3693 if (!skip_call) { 3694 get_dispatch_table(pc_device_table_map, device)->GetRenderAreaGranularity(device, renderPass, pGranularity); 3695 } 3696} 3697 3698VKAPI_ATTR VkResult VKAPI_CALL CreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo, 3699 const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool) { 3700 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 3701 bool skip_call = false; 3702 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 3703 assert(my_data != NULL); 3704 3705 skip_call |= 3706 validate_queue_family_index(my_data, "vkCreateCommandPool", "pCreateInfo->queueFamilyIndex", pCreateInfo->queueFamilyIndex); 3707 3708 skip_call |= parameter_validation_vkCreateCommandPool(my_data->report_data, pCreateInfo, pAllocator, pCommandPool); 3709 3710 if (!skip_call) { 3711 result = get_dispatch_table(pc_device_table_map, device)->CreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool); 3712 3713 validate_result(my_data->report_data, "vkCreateCommandPool", result); 3714 } 3715 3716 return result; 3717} 3718 3719VKAPI_ATTR void VKAPI_CALL DestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks *pAllocator) { 3720 bool skip_call = false; 3721 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 3722 assert(my_data != NULL); 3723 3724 skip_call |= parameter_validation_vkDestroyCommandPool(my_data->report_data, commandPool, pAllocator); 3725 3726 if (!skip_call) { 3727 get_dispatch_table(pc_device_table_map, device)->DestroyCommandPool(device, commandPool, pAllocator); 
3728 } 3729} 3730 3731VKAPI_ATTR VkResult VKAPI_CALL ResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags) { 3732 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 3733 bool skip_call = false; 3734 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 3735 assert(my_data != NULL); 3736 3737 skip_call |= parameter_validation_vkResetCommandPool(my_data->report_data, commandPool, flags); 3738 3739 if (!skip_call) { 3740 result = get_dispatch_table(pc_device_table_map, device)->ResetCommandPool(device, commandPool, flags); 3741 3742 validate_result(my_data->report_data, "vkResetCommandPool", result); 3743 } 3744 3745 return result; 3746} 3747 3748VKAPI_ATTR VkResult VKAPI_CALL AllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pAllocateInfo, 3749 VkCommandBuffer *pCommandBuffers) { 3750 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 3751 bool skip_call = false; 3752 layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 3753 assert(my_data != NULL); 3754 3755 skip_call |= parameter_validation_vkAllocateCommandBuffers(my_data->report_data, pAllocateInfo, pCommandBuffers); 3756 3757 if (!skip_call) { 3758 result = get_dispatch_table(pc_device_table_map, device)->AllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers); 3759 3760 validate_result(my_data->report_data, "vkAllocateCommandBuffers", result); 3761 } 3762 3763 return result; 3764} 3765 3766VKAPI_ATTR void VKAPI_CALL FreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, 3767 const VkCommandBuffer *pCommandBuffers) { 3768 bool skip_call = false; 3769 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 3770 assert(device_data != nullptr); 3771 debug_report_data *report_data = device_data->report_data; 3772 3773 skip_call |= parameter_validation_vkFreeCommandBuffers(report_data, commandPool, commandBufferCount, 
                                                          pCommandBuffers);

    // Validation for parameters excluded from the generated validation code due to a 'noautovalidity' tag in vk.xml
    // This is an array of handles, where the elements are allowed to be VK_NULL_HANDLE, and does not require any validation beyond
    // validate_array()
    skip_call |= validate_array(report_data, "vkFreeCommandBuffers", "commandBufferCount", "pCommandBuffers", commandBufferCount,
                                pCommandBuffers, true, true);

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, device)
            ->FreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers);
    }
}

// Manual checks for vkBeginCommandBuffer that depend on the enabled physical-device
// features: inheritance-info query settings are only legal when the corresponding
// feature is supported. Returns true if an error was logged (call should be skipped).
bool PreBeginCommandBuffer(layer_data *dev_data, VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo *pBeginInfo) {
    bool skip_call = false;
    // Feature flags are tracked in the physical device's layer_data, not the device's.
    layer_data *phy_dev_data = get_my_data_ptr(get_dispatch_key(dev_data->physical_device), layer_data_map);
    const VkCommandBufferInheritanceInfo *pInfo = pBeginInfo->pInheritanceInfo;

    if (pInfo != NULL) {
        // occlusionQueryEnable requires the inheritedQueries device feature.
        if ((phy_dev_data->physical_device_features.inheritedQueries == VK_FALSE) && (pInfo->occlusionQueryEnable != VK_FALSE)) {
            skip_call |=
                log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                        reinterpret_cast<uint64_t>(commandBuffer), __LINE__, DEVICE_FEATURE, LayerName,
                        "Cannot set inherited occlusionQueryEnable in vkBeginCommandBuffer() when device does not support "
                        "inheritedQueries.");
        }

        // When inheritedQueries is supported and occlusion queries are enabled,
        // queryFlags must be a valid VkQueryControlFlagBits combination.
        if ((phy_dev_data->physical_device_features.inheritedQueries != VK_FALSE) && (pInfo->occlusionQueryEnable != VK_FALSE) &&
            (!validate_VkQueryControlFlagBits(VkQueryControlFlagBits(pInfo->queryFlags)))) {
            skip_call |=
                log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
                        reinterpret_cast<uint64_t>(commandBuffer), __LINE__, DEVICE_FEATURE, LayerName,
                        "Cannot enable in occlusion queries in vkBeginCommandBuffer() and set queryFlags to %d which is not a "
                        "valid combination of VkQueryControlFlagBits.",
                        pInfo->queryFlags);
        }
    }
    return skip_call;
}

// Intercepts vkBeginCommandBuffer: generated validation, manual checks for the
// 'noautovalidity' pInheritanceInfo members, and the feature-dependent checks in
// PreBeginCommandBuffer, before forwarding down the chain.
VKAPI_ATTR VkResult VKAPI_CALL BeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo *pBeginInfo) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
    bool skip_call = false;
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(device_data != nullptr);
    debug_report_data *report_data = device_data->report_data;

    skip_call |= parameter_validation_vkBeginCommandBuffer(report_data, pBeginInfo);

    // Validation for parameters excluded from the generated validation code due to a 'noautovalidity' tag in vk.xml
    // TODO: pBeginInfo->pInheritanceInfo must not be NULL if commandBuffer is a secondary command buffer
    skip_call |= validate_struct_type(report_data, "vkBeginCommandBuffer", "pBeginInfo->pInheritanceInfo",
                                      "VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO", pBeginInfo->pInheritanceInfo,
                                      VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO, false);

    if (pBeginInfo->pInheritanceInfo != NULL) {
        skip_call |= validate_struct_pnext(report_data, "vkBeginCommandBuffer", "pBeginInfo->pInheritanceInfo->pNext", NULL,
                                           pBeginInfo->pInheritanceInfo->pNext, 0, NULL, GeneratedHeaderVersion);

        skip_call |= validate_bool32(report_data, "vkBeginCommandBuffer", "pBeginInfo->pInheritanceInfo->occlusionQueryEnable",
                                     pBeginInfo->pInheritanceInfo->occlusionQueryEnable);

        // TODO: This only needs to be validated when the inherited queries feature is enabled
        // skip_call |= validate_flags(report_data, "vkBeginCommandBuffer", "pBeginInfo->pInheritanceInfo->queryFlags",
        // "VkQueryControlFlagBits", AllVkQueryControlFlagBits, pBeginInfo->pInheritanceInfo->queryFlags, false);

        // TODO: This must be 0 if the pipeline
        // statistics queries feature is not enabled
        skip_call |= validate_flags(report_data, "vkBeginCommandBuffer", "pBeginInfo->pInheritanceInfo->pipelineStatistics",
                                    "VkQueryPipelineStatisticFlagBits", AllVkQueryPipelineStatisticFlagBits,
                                    pBeginInfo->pInheritanceInfo->pipelineStatistics, false);
    }

    skip_call |= PreBeginCommandBuffer(device_data, commandBuffer, pBeginInfo);

    if (!skip_call) {
        result = get_dispatch_table(pc_device_table_map, commandBuffer)->BeginCommandBuffer(commandBuffer, pBeginInfo);

        validate_result(report_data, "vkBeginCommandBuffer", result);
    }

    return result;
}

// Intercepts vkEndCommandBuffer: no parameters to validate beyond the dispatchable
// handle, so this forwards directly and validates the returned VkResult.
VKAPI_ATTR VkResult VKAPI_CALL EndCommandBuffer(VkCommandBuffer commandBuffer) {
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    VkResult result = get_dispatch_table(pc_device_table_map, commandBuffer)->EndCommandBuffer(commandBuffer);

    validate_result(my_data->report_data, "vkEndCommandBuffer", result);

    return result;
}

// Intercepts vkResetCommandBuffer: validates flags, then forwards and checks the result.
VKAPI_ATTR VkResult VKAPI_CALL ResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    bool skip_call = parameter_validation_vkResetCommandBuffer(my_data->report_data, flags);

    if (!skip_call) {
        result = get_dispatch_table(pc_device_table_map, commandBuffer)->ResetCommandBuffer(commandBuffer, flags);

        validate_result(my_data->report_data, "vkResetCommandBuffer", result);
    }

    return result;
}

// Intercepts vkCmdBindPipeline: validates pipelineBindPoint and pipeline, then forwards.
VKAPI_ATTR void VKAPI_CALL CmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
                                           VkPipeline pipeline) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkCmdBindPipeline(my_data->report_data, pipelineBindPoint, pipeline);

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, commandBuffer)->CmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
    }
}

// Intercepts vkCmdSetViewport: validates the viewport array before forwarding.
VKAPI_ATTR void VKAPI_CALL CmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount,
                                          const VkViewport *pViewports) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkCmdSetViewport(my_data->report_data, firstViewport, viewportCount, pViewports);

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, commandBuffer)
            ->CmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports);
    }
}

// Intercepts vkCmdSetScissor: validates the scissor array before forwarding.
VKAPI_ATTR void VKAPI_CALL CmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
                                         const VkRect2D *pScissors) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkCmdSetScissor(my_data->report_data, firstScissor, scissorCount, pScissors);

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, commandBuffer)->CmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors);
    }
}

// Intercepts vkCmdSetLineWidth: nothing to validate; pass straight through.
VKAPI_ATTR void VKAPI_CALL CmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
    get_dispatch_table(pc_device_table_map, commandBuffer)->CmdSetLineWidth(commandBuffer, lineWidth);
}

// Intercepts vkCmdSetDepthBias: nothing to validate; pass straight through.
VKAPI_ATTR void VKAPI_CALL CmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp,
                                           float depthBiasSlopeFactor) {
    get_dispatch_table(pc_device_table_map, commandBuffer)
        ->CmdSetDepthBias(commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
}

// Intercepts vkCmdSetBlendConstants: validates the blendConstants array, then forwards.
VKAPI_ATTR void VKAPI_CALL CmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkCmdSetBlendConstants(my_data->report_data, blendConstants);

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, commandBuffer)->CmdSetBlendConstants(commandBuffer, blendConstants);
    }
}

// Intercepts vkCmdSetDepthBounds: nothing to validate; pass straight through.
VKAPI_ATTR void VKAPI_CALL CmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds) {
    get_dispatch_table(pc_device_table_map, commandBuffer)->CmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds);
}

// Intercepts vkCmdSetStencilCompareMask: validates faceMask, then forwards.
VKAPI_ATTR void VKAPI_CALL CmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
                                                    uint32_t compareMask) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkCmdSetStencilCompareMask(my_data->report_data, faceMask, compareMask);

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, commandBuffer)->CmdSetStencilCompareMask(commandBuffer, faceMask, compareMask);
    }
}

// Intercepts vkCmdSetStencilWriteMask: validates faceMask, then forwards.
VKAPI_ATTR void VKAPI_CALL CmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkCmdSetStencilWriteMask(my_data->report_data, faceMask, writeMask);

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, commandBuffer)->CmdSetStencilWriteMask(commandBuffer, faceMask, writeMask);
    }
}

// Intercepts vkCmdSetStencilReference: validates faceMask, then forwards.
VKAPI_ATTR void VKAPI_CALL CmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkCmdSetStencilReference(my_data->report_data, faceMask, reference);

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, commandBuffer)->CmdSetStencilReference(commandBuffer, faceMask, reference);
    }
}

// Intercepts vkCmdBindDescriptorSets: validates the descriptor-set and dynamic-offset
// arrays, then forwards.
VKAPI_ATTR void VKAPI_CALL CmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
                                                 VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount,
                                                 const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
                                                 const uint32_t *pDynamicOffsets) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skip_call |=
        parameter_validation_vkCmdBindDescriptorSets(my_data->report_data, pipelineBindPoint, layout, firstSet, descriptorSetCount,
                                                     pDescriptorSets, dynamicOffsetCount, pDynamicOffsets);

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, commandBuffer)
            ->CmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets,
                                    dynamicOffsetCount, pDynamicOffsets);
    }
}

// Intercepts vkCmdBindIndexBuffer: validates buffer and indexType, then forwards.
VKAPI_ATTR void VKAPI_CALL CmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                              VkIndexType indexType) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skip_call |=
        parameter_validation_vkCmdBindIndexBuffer(my_data->report_data, buffer, offset, indexType);

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, commandBuffer)->CmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
    }
}

// Intercepts vkCmdBindVertexBuffers: validates the buffer/offset arrays, then forwards.
VKAPI_ATTR void VKAPI_CALL CmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount,
                                                const VkBuffer *pBuffers, const VkDeviceSize *pOffsets) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkCmdBindVertexBuffers(my_data->report_data, firstBinding, bindingCount, pBuffers, pOffsets);

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, commandBuffer)
            ->CmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
    }
}

// Emits warnings (not errors) when vkCmdDraw is recorded with a zero vertex or
// instance count. Returns false when a warning was logged; the caller currently
// ignores the return value, so these checks are advisory only.
bool PreCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex,
                uint32_t firstInstance) {
    if (vertexCount == 0) {
        // TODO: Verify against Valid Usage section. I don't see a non-zero vertexCount listed, may need to add that and make
        // this an error or leave as is.
        log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                REQUIRED_PARAMETER, LayerName, "vkCmdDraw parameter, uint32_t vertexCount, is 0");
        return false;
    }

    if (instanceCount == 0) {
        // TODO: Verify against Valid Usage section. I don't see a non-zero instanceCount listed, may need to add that and make
        // this an error or leave as is.
        log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                REQUIRED_PARAMETER, LayerName, "vkCmdDraw parameter, uint32_t instanceCount, is 0");
        return false;
    }

    return true;
}

// Intercepts vkCmdDraw: emits the zero-count warnings above, then always forwards.
VKAPI_ATTR void VKAPI_CALL CmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
                                   uint32_t firstVertex, uint32_t firstInstance) {
    PreCmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);

    get_dispatch_table(pc_device_table_map, commandBuffer)
        ->CmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
}

// Intercepts vkCmdDrawIndexed: nothing to validate; pass straight through.
VKAPI_ATTR void VKAPI_CALL CmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
                                          uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
    get_dispatch_table(pc_device_table_map, commandBuffer)
        ->CmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
}

// Intercepts vkCmdDrawIndirect: validates the indirect buffer handle, then forwards.
VKAPI_ATTR void VKAPI_CALL CmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count,
                                           uint32_t stride) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkCmdDrawIndirect(my_data->report_data, buffer, offset, count, stride);

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, commandBuffer)->CmdDrawIndirect(commandBuffer, buffer, offset, count, stride);
    }
}

// Intercepts vkCmdDrawIndexedIndirect: validates the indirect buffer handle, then forwards.
VKAPI_ATTR void VKAPI_CALL CmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                  uint32_t count, uint32_t stride) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skip_call |=
        parameter_validation_vkCmdDrawIndexedIndirect(my_data->report_data, buffer, offset, count, stride);

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, commandBuffer)
            ->CmdDrawIndexedIndirect(commandBuffer, buffer, offset, count, stride);
    }
}

// Intercepts vkCmdDispatch: nothing to validate; pass straight through.
VKAPI_ATTR void VKAPI_CALL CmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
    get_dispatch_table(pc_device_table_map, commandBuffer)->CmdDispatch(commandBuffer, x, y, z);
}

// Intercepts vkCmdDispatchIndirect: validates the indirect buffer handle, then forwards.
VKAPI_ATTR void VKAPI_CALL CmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkCmdDispatchIndirect(my_data->report_data, buffer, offset);

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, commandBuffer)->CmdDispatchIndirect(commandBuffer, buffer, offset);
    }
}

// Intercepts vkCmdCopyBuffer: validates handles and the region array, then forwards.
VKAPI_ATTR void VKAPI_CALL CmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
                                         uint32_t regionCount, const VkBufferCopy *pRegions) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkCmdCopyBuffer(my_data->report_data, srcBuffer, dstBuffer, regionCount, pRegions);

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, commandBuffer)
            ->CmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
    }
}

// Checks that the first VkImageCopy region's src/dst aspectMask contains at least one
// recognized VkImageAspectFlagBits bit. Note: only pRegions[0] is inspected here.
// Returns false if an error was logged.
bool PreCmdCopyImage(VkCommandBuffer commandBuffer, const VkImageCopy *pRegions) {
    if (pRegions != nullptr) {
        if ((pRegions->srcSubresource.aspectMask & (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT |
                                                    VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) == 0) {
log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 4130 UNRECOGNIZED_VALUE, LayerName, 4131 "vkCmdCopyImage parameter, VkImageAspect pRegions->srcSubresource.aspectMask, is an unrecognized enumerator"); 4132 return false; 4133 } 4134 if ((pRegions->dstSubresource.aspectMask & (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | 4135 VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) == 0) { 4136 log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 4137 UNRECOGNIZED_VALUE, LayerName, 4138 "vkCmdCopyImage parameter, VkImageAspect pRegions->dstSubresource.aspectMask, is an unrecognized enumerator"); 4139 return false; 4140 } 4141 } 4142 4143 return true; 4144} 4145 4146VKAPI_ATTR void VKAPI_CALL CmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, 4147 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, 4148 const VkImageCopy *pRegions) { 4149 bool skip_call = false; 4150 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 4151 assert(my_data != NULL); 4152 4153 skip_call |= parameter_validation_vkCmdCopyImage(my_data->report_data, srcImage, srcImageLayout, dstImage, dstImageLayout, 4154 regionCount, pRegions); 4155 4156 if (!skip_call) { 4157 PreCmdCopyImage(commandBuffer, pRegions); 4158 4159 get_dispatch_table(pc_device_table_map, commandBuffer) 4160 ->CmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions); 4161 } 4162} 4163 4164bool PreCmdBlitImage(VkCommandBuffer commandBuffer, const VkImageBlit *pRegions) { 4165 if (pRegions != nullptr) { 4166 if ((pRegions->srcSubresource.aspectMask & (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | 4167 VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) == 0) { 4168 log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, 
VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 4169 UNRECOGNIZED_VALUE, LayerName, 4170 "vkCmdCopyImage parameter, VkImageAspect pRegions->srcSubresource.aspectMask, is an unrecognized enumerator"); 4171 return false; 4172 } 4173 if ((pRegions->dstSubresource.aspectMask & (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | 4174 VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) == 0) { 4175 log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, 4176 UNRECOGNIZED_VALUE, LayerName, 4177 "vkCmdCopyImage parameter, VkImageAspect pRegions->dstSubresource.aspectMask, is an unrecognized enumerator"); 4178 return false; 4179 } 4180 } 4181 4182 return true; 4183} 4184 4185VKAPI_ATTR void VKAPI_CALL CmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, 4186 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, 4187 const VkImageBlit *pRegions, VkFilter filter) { 4188 bool skip_call = false; 4189 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 4190 assert(my_data != NULL); 4191 4192 skip_call |= parameter_validation_vkCmdBlitImage(my_data->report_data, srcImage, srcImageLayout, dstImage, dstImageLayout, 4193 regionCount, pRegions, filter); 4194 4195 if (!skip_call) { 4196 PreCmdBlitImage(commandBuffer, pRegions); 4197 4198 get_dispatch_table(pc_device_table_map, commandBuffer) 4199 ->CmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter); 4200 } 4201} 4202 4203bool PreCmdCopyBufferToImage(VkCommandBuffer commandBuffer, const VkBufferImageCopy *pRegions) { 4204 if (pRegions != nullptr) { 4205 if ((pRegions->imageSubresource.aspectMask & (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | 4206 VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) == 0) { 4207 log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, 
                    VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                    UNRECOGNIZED_VALUE, LayerName,
                    "vkCmdCopyBufferToImage parameter, VkImageAspect pRegions->imageSubresource.aspectMask, is an unrecognized "
                    "enumerator");
            return false;
        }
    }

    return true;
}

// Intercepts vkCmdCopyBufferToImage: generated validation plus the aspectMask check
// above, then forwards.
VKAPI_ATTR void VKAPI_CALL CmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
                                                VkImageLayout dstImageLayout, uint32_t regionCount,
                                                const VkBufferImageCopy *pRegions) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkCmdCopyBufferToImage(my_data->report_data, srcBuffer, dstImage, dstImageLayout, regionCount,
                                                             pRegions);

    if (!skip_call) {
        PreCmdCopyBufferToImage(commandBuffer, pRegions);

        get_dispatch_table(pc_device_table_map, commandBuffer)
            ->CmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
    }
}

// Checks that the first VkBufferImageCopy region's imageSubresource.aspectMask contains
// at least one recognized VkImageAspectFlagBits bit. Returns false if an error was logged.
bool PreCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, const VkBufferImageCopy *pRegions) {
    if (pRegions != nullptr) {
        if ((pRegions->imageSubresource.aspectMask & (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT |
                                                      VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) == 0) {
            log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                    UNRECOGNIZED_VALUE, LayerName,
                    "vkCmdCopyImageToBuffer parameter, VkImageAspect pRegions->imageSubresource.aspectMask, is an unrecognized "
                    "enumerator");
            return false;
        }
    }

    return true;
}

// Intercepts vkCmdCopyImageToBuffer: generated validation plus the aspectMask check
// above, then forwards.
VKAPI_ATTR void VKAPI_CALL CmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                                VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkCmdCopyImageToBuffer(my_data->report_data, srcImage, srcImageLayout, dstBuffer, regionCount,
                                                             pRegions);

    if (!skip_call) {
        PreCmdCopyImageToBuffer(commandBuffer, pRegions);

        get_dispatch_table(pc_device_table_map, commandBuffer)
            ->CmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
    }
}

// Intercepts vkCmdUpdateBuffer: generated validation plus manual alignment/size rules:
// dstOffset must be a multiple of 4; dataSize must be in (0, 65536] and a multiple of 4.
VKAPI_ATTR void VKAPI_CALL CmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                           VkDeviceSize dataSize, const uint32_t *pData) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkCmdUpdateBuffer(my_data->report_data, dstBuffer, dstOffset, dataSize, pData);

    // Low two bits set => not 4-byte aligned.
    if (dstOffset & 3) {
        skip_call |= log_msg(
            my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VkDebugReportObjectTypeEXT(0), 0, __LINE__, INVALID_USAGE,
            LayerName, "CmdUpdateBuffer parameter, VkDeviceSize dstOffset (0x%" PRIxLEAST64 "), is not a multiple of 4", dstOffset);
    }

    // dataSize is unsigned (VkDeviceSize), so '<= 0' effectively means '== 0'.
    if ((dataSize <= 0) || (dataSize > 65536)) {
        skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VkDebugReportObjectTypeEXT(0), 0, __LINE__,
                             INVALID_USAGE, LayerName, "CmdUpdateBuffer parameter, VkDeviceSize dataSize (0x%" PRIxLEAST64
                             "), must be greater than zero and less than or equal to 65536",
                             dataSize);
    } else if (dataSize & 3) {
        skip_call |= log_msg(
            my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VkDebugReportObjectTypeEXT(0), 0, __LINE__, INVALID_USAGE,
            LayerName, "CmdUpdateBuffer parameter, VkDeviceSize dataSize (0x%" PRIxLEAST64 "), is not a multiple of 4", dataSize);
    }

    if
    (!skip_call) {
        get_dispatch_table(pc_device_table_map, commandBuffer)
            ->CmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
    }
}

// Intercepts vkCmdFillBuffer: generated validation plus manual alignment/size rules:
// dstOffset must be a multiple of 4; size, unless VK_WHOLE_SIZE, must be non-zero and
// a multiple of 4.
VKAPI_ATTR void VKAPI_CALL CmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                                         VkDeviceSize size, uint32_t data) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkCmdFillBuffer(my_data->report_data, dstBuffer, dstOffset, size, data);

    // Low two bits set => not 4-byte aligned.
    if (dstOffset & 3) {
        skip_call |= log_msg(
            my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VkDebugReportObjectTypeEXT(0), 0, __LINE__, INVALID_USAGE,
            LayerName, "vkCmdFillBuffer parameter, VkDeviceSize dstOffset (0x%" PRIxLEAST64 "), is not a multiple of 4", dstOffset);
    }

    if (size != VK_WHOLE_SIZE) {
        // size is unsigned (VkDeviceSize), so '<= 0' effectively means '== 0'.
        if (size <= 0) {
            skip_call |= log_msg(
                my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VkDebugReportObjectTypeEXT(0), 0, __LINE__, INVALID_USAGE,
                LayerName, "vkCmdFillBuffer parameter, VkDeviceSize size (0x%" PRIxLEAST64 "), must be greater than zero", size);
        } else if (size & 3) {
            skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VkDebugReportObjectTypeEXT(0), 0, __LINE__,
                                 INVALID_USAGE, LayerName,
                                 "vkCmdFillBuffer parameter, VkDeviceSize size (0x%" PRIxLEAST64 "), is not a multiple of 4", size);
        }
    }

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, commandBuffer)->CmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
    }
}

// Intercepts vkCmdClearColorImage: validates image/layout/pColor and the range array,
// then forwards.
VKAPI_ATTR void VKAPI_CALL CmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
                                              const VkClearColorValue *pColor, uint32_t rangeCount,
                                              const VkImageSubresourceRange *pRanges) {
    bool skip_call = false;
    layer_data *my_data =
        get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkCmdClearColorImage(my_data->report_data, image, imageLayout, pColor, rangeCount, pRanges);

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, commandBuffer)
            ->CmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
    }
}

// Intercepts vkCmdClearDepthStencilImage: validates parameters and the range array,
// then forwards.
VKAPI_ATTR void VKAPI_CALL CmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
                                                     const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
                                                     const VkImageSubresourceRange *pRanges) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkCmdClearDepthStencilImage(my_data->report_data, image, imageLayout, pDepthStencil,
                                                                  rangeCount, pRanges);

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, commandBuffer)
            ->CmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
    }
}

// Intercepts vkCmdClearAttachments: validates the attachment and rect arrays, then forwards.
VKAPI_ATTR void VKAPI_CALL CmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount,
                                               const VkClearAttachment *pAttachments, uint32_t rectCount,
                                               const VkClearRect *pRects) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkCmdClearAttachments(my_data->report_data, attachmentCount, pAttachments, rectCount, pRects);

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, commandBuffer)
            ->CmdClearAttachments(commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
    }
}

// Checks that the first VkImageResolve region's src/dst aspectMask contains at least
// one recognized VkImageAspectFlagBits bit. Note: only pRegions[0] is inspected here.
// Returns false if an error was logged.
bool PreCmdResolveImage(VkCommandBuffer commandBuffer, const VkImageResolve *pRegions) {
    if (pRegions != nullptr) {
        if ((pRegions->srcSubresource.aspectMask & (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT |
                                                    VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) == 0) {
            log_msg(
                mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                UNRECOGNIZED_VALUE, LayerName,
                "vkCmdResolveImage parameter, VkImageAspect pRegions->srcSubresource.aspectMask, is an unrecognized enumerator");
            return false;
        }
        if ((pRegions->dstSubresource.aspectMask & (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT |
                                                    VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) == 0) {
            log_msg(
                mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                UNRECOGNIZED_VALUE, LayerName,
                "vkCmdResolveImage parameter, VkImageAspect pRegions->dstSubresource.aspectMask, is an unrecognized enumerator");
            return false;
        }
    }

    return true;
}

// Intercepts vkCmdResolveImage: generated validation plus the aspectMask check above,
// then forwards.
VKAPI_ATTR void VKAPI_CALL CmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
                                           VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
                                           const VkImageResolve *pRegions) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkCmdResolveImage(my_data->report_data, srcImage, srcImageLayout, dstImage, dstImageLayout,
                                                        regionCount, pRegions);

    if (!skip_call) {
        PreCmdResolveImage(commandBuffer, pRegions);

        get_dispatch_table(pc_device_table_map, commandBuffer)
            ->CmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
    }
}

// Intercepts vkCmdSetEvent: validates event/stageMask, then forwards.
VKAPI_ATTR void VKAPI_CALL CmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
    bool
    skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkCmdSetEvent(my_data->report_data, event, stageMask);

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, commandBuffer)->CmdSetEvent(commandBuffer, event, stageMask);
    }
}

// Intercepts vkCmdResetEvent: validates event/stageMask, then forwards.
VKAPI_ATTR void VKAPI_CALL CmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkCmdResetEvent(my_data->report_data, event, stageMask);

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, commandBuffer)->CmdResetEvent(commandBuffer, event, stageMask);
    }
}

// Intercepts vkCmdWaitEvents: validates the event array and all three memory-barrier
// arrays, then forwards.
VKAPI_ATTR void VKAPI_CALL CmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
                                         VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
                                         uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                         uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                         uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkCmdWaitEvents(my_data->report_data, eventCount, pEvents, srcStageMask, dstStageMask,
                                                      memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
                                                      pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, commandBuffer)
            ->CmdWaitEvents(commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers,
                            bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
    }
}

// Intercepts vkCmdPipelineBarrier: validates all three memory-barrier arrays, then forwards.
VKAPI_ATTR void VKAPI_CALL CmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
                                              VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
                                              uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
                                              uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                                              uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkCmdPipelineBarrier(my_data->report_data, srcStageMask, dstStageMask, dependencyFlags,
                                                           memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
                                                           pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, commandBuffer)
            ->CmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers,
                                 bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
    }
}

// Intercepts vkCmdBeginQuery: validates queryPool/flags, then forwards.
VKAPI_ATTR void VKAPI_CALL CmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot,
                                         VkQueryControlFlags flags) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkCmdBeginQuery(my_data->report_data, queryPool, slot, flags);

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, commandBuffer)->CmdBeginQuery(commandBuffer, queryPool, slot, flags);
    }
}

// Intercepts vkCmdEndQuery: validates queryPool, then forwards.
VKAPI_ATTR void VKAPI_CALL CmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool
queryPool, uint32_t slot) { 4495 bool skip_call = false; 4496 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 4497 assert(my_data != NULL); 4498 4499 skip_call |= parameter_validation_vkCmdEndQuery(my_data->report_data, queryPool, slot); 4500 4501 if (!skip_call) { 4502 get_dispatch_table(pc_device_table_map, commandBuffer)->CmdEndQuery(commandBuffer, queryPool, slot); 4503 } 4504} 4505 4506VKAPI_ATTR void VKAPI_CALL CmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, 4507 uint32_t queryCount) { 4508 bool skip_call = false; 4509 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 4510 assert(my_data != NULL); 4511 4512 skip_call |= parameter_validation_vkCmdResetQueryPool(my_data->report_data, queryPool, firstQuery, queryCount); 4513 4514 if (!skip_call) { 4515 get_dispatch_table(pc_device_table_map, commandBuffer)->CmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount); 4516 } 4517} 4518 4519bool PostCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, 4520 uint32_t slot) { 4521 4522 ValidateEnumerator(pipelineStage); 4523 4524 return true; 4525} 4526 4527VKAPI_ATTR void VKAPI_CALL CmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, 4528 VkQueryPool queryPool, uint32_t query) { 4529 bool skip_call = false; 4530 layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map); 4531 assert(my_data != NULL); 4532 4533 skip_call |= parameter_validation_vkCmdWriteTimestamp(my_data->report_data, pipelineStage, queryPool, query); 4534 4535 if (!skip_call) { 4536 get_dispatch_table(pc_device_table_map, commandBuffer)->CmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, query); 4537 4538 PostCmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, query); 4539 } 4540} 4541 4542VKAPI_ATTR void VKAPI_CALL 
CmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
                        uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                        VkDeviceSize stride, VkQueryResultFlags flags) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkCmdCopyQueryPoolResults(my_data->report_data, queryPool, firstQuery, queryCount, dstBuffer,
                                                                dstOffset, stride, flags);

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, commandBuffer)
            ->CmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags);
    }
}

// Intercepts vkCmdPushConstants: validates the stage flags and value pointer, then forwards.
VKAPI_ATTR void VKAPI_CALL CmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags,
                                            uint32_t offset, uint32_t size, const void *pValues) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkCmdPushConstants(my_data->report_data, layout, stageFlags, offset, size, pValues);

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, commandBuffer)
            ->CmdPushConstants(commandBuffer, layout, stageFlags, offset, size, pValues);
    }
}

// Intercepts vkCmdBeginRenderPass: validates pRenderPassBegin and contents, then forwards.
VKAPI_ATTR void VKAPI_CALL CmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
                                              VkSubpassContents contents) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkCmdBeginRenderPass(my_data->report_data, pRenderPassBegin, contents);

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, commandBuffer)->CmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
    }
}

// Intercepts vkCmdNextSubpass.
VKAPI_ATTR void VKAPI_CALL CmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkCmdNextSubpass(my_data->report_data, contents);

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, commandBuffer)->CmdNextSubpass(commandBuffer, contents);
    }
}

// vkCmdEndRenderPass takes no parameters beyond the dispatchable handle, so there
// is nothing to validate; the call is forwarded unconditionally.
VKAPI_ATTR void VKAPI_CALL CmdEndRenderPass(VkCommandBuffer commandBuffer) {
    get_dispatch_table(pc_device_table_map, commandBuffer)->CmdEndRenderPass(commandBuffer);
}

// Intercepts vkCmdExecuteCommands: validates the secondary command buffer array, then forwards.
VKAPI_ATTR void VKAPI_CALL CmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount,
                                              const VkCommandBuffer *pCommandBuffers) {
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkCmdExecuteCommands(my_data->report_data, commandBufferCount, pCommandBuffers);

    if (!skip_call) {
        get_dispatch_table(pc_device_table_map, commandBuffer)
            ->CmdExecuteCommands(commandBuffer, commandBufferCount, pCommandBuffers);
    }
}

// Reports this layer's single VkLayerProperties entry.
VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceLayerProperties(uint32_t *pCount, VkLayerProperties *pProperties) {
    return util_GetLayerProperties(1, &global_layer, pCount, pProperties);
}

// Device-level layer enumeration mirrors the instance-level list.
VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount,
                                                              VkLayerProperties *pProperties) {
    return util_GetLayerProperties(1, &global_layer, pCount, pProperties);
}

// Answers only for this layer's own name; any other layer name is "not present".
VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
                                                                    VkExtensionProperties *pProperties) {
    if (pLayerName && !strcmp(pLayerName, global_layer.layerName))
        return util_GetExtensionProperties(1, instance_extensions, pCount, pProperties);

    return VK_ERROR_LAYER_NOT_PRESENT;
}

// Reports no device extensions for this layer; other queries pass down the chain.
VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char *pLayerName,
                                                                  uint32_t *pCount, VkExtensionProperties *pProperties) {
    /* parameter_validation does not have any physical device extensions */
    if (pLayerName && !strcmp(pLayerName, global_layer.layerName))
        return util_GetExtensionProperties(0, NULL, pCount, pProperties);

    assert(physicalDevice);

    return get_dispatch_table(pc_instance_table_map, physicalDevice)
        ->EnumerateDeviceExtensionProperties(physicalDevice, NULL, pCount, pProperties);
}

// WSI Extension Functions

// Intercepts vkCreateSwapchainKHR: validates the create info, forwards when clean,
// and validates the returned VkResult code.
VKAPI_ATTR VkResult VKAPI_CALL CreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
                                                  const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkCreateSwapchainKHR(my_data->report_data, pCreateInfo, pAllocator, pSwapchain);

    if (!skip_call) {
        result = get_dispatch_table(pc_device_table_map, device)->CreateSwapchainKHR(device, pCreateInfo, pAllocator, pSwapchain);

        validate_result(my_data->report_data, "vkCreateSwapchainKHR", result);
    }

    return result;
}

// Intercepts vkGetSwapchainImagesKHR (two-call count/fill query).
VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount,
                                                     VkImage *pSwapchainImages) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(my_data != NULL);

    skip_call |=
        parameter_validation_vkGetSwapchainImagesKHR(my_data->report_data, swapchain, pSwapchainImageCount, pSwapchainImages);

    if (!skip_call) {
        result = get_dispatch_table(pc_device_table_map, device)
                     ->GetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages);

        validate_result(my_data->report_data, "vkGetSwapchainImagesKHR", result);
    }

    return result;
}

// Intercepts vkAcquireNextImageKHR: validates handles/output pointer, forwards,
// and checks the returned VkResult.
VKAPI_ATTR VkResult VKAPI_CALL AcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
                                                   VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    assert(my_data != NULL);

    skip_call |=
        parameter_validation_vkAcquireNextImageKHR(my_data->report_data, swapchain, timeout, semaphore, fence, pImageIndex);

    if (!skip_call) {
        result = get_dispatch_table(pc_device_table_map, device)
                     ->AcquireNextImageKHR(device, swapchain, timeout, semaphore, fence, pImageIndex);

        validate_result(my_data->report_data, "vkAcquireNextImageKHR", result);
    }

    return result;
}

// Intercepts vkQueuePresentKHR: validates pPresentInfo, forwards, checks the result.
VKAPI_ATTR VkResult VKAPI_CALL QueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(queue), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkQueuePresentKHR(my_data->report_data, pPresentInfo);

    if (!skip_call) {
        result = get_dispatch_table(pc_device_table_map, queue)->QueuePresentKHR(queue, pPresentInfo);

        validate_result(my_data->report_data, "vkQueuePresentKHR", result);
    }

    return result;
}

// Intercepts vkGetPhysicalDeviceSurfaceSupportKHR (instance-level dispatch).
VKAPI_ATTR VkResult VKAPI_CALL
GetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex,
                                   VkSurfaceKHR surface, VkBool32 *pSupported) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
    assert(my_data != NULL);

    skip_call |=
        parameter_validation_vkGetPhysicalDeviceSurfaceSupportKHR(my_data->report_data, queueFamilyIndex, surface, pSupported);

    if (!skip_call) {
        result = get_dispatch_table(pc_instance_table_map, physicalDevice)
                     ->GetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface, pSupported);

        validate_result(my_data->report_data, "vkGetPhysicalDeviceSurfaceSupportKHR", result);
    }

    return result;
}

// Intercepts vkGetPhysicalDeviceSurfaceCapabilitiesKHR.
VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
                                                                       VkSurfaceCapabilitiesKHR *pSurfaceCapabilities) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
    assert(my_data != NULL);

    skip_call |=
        parameter_validation_vkGetPhysicalDeviceSurfaceCapabilitiesKHR(my_data->report_data, surface, pSurfaceCapabilities);

    if (!skip_call) {
        result = get_dispatch_table(pc_instance_table_map, physicalDevice)
                     ->GetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, pSurfaceCapabilities);

        validate_result(my_data->report_data, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR", result);
    }

    return result;
}

// Intercepts vkGetPhysicalDeviceSurfaceFormatsKHR (two-call count/fill query).
VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
                                                                  uint32_t *pSurfaceFormatCount,
                                                                  VkSurfaceFormatKHR *pSurfaceFormats) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkGetPhysicalDeviceSurfaceFormatsKHR(my_data->report_data, surface, pSurfaceFormatCount,
                                                                           pSurfaceFormats);

    if (!skip_call) {
        result = get_dispatch_table(pc_instance_table_map, physicalDevice)
                     ->GetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats);

        validate_result(my_data->report_data, "vkGetPhysicalDeviceSurfaceFormatsKHR", result);
    }

    return result;
}

// Intercepts vkGetPhysicalDeviceSurfacePresentModesKHR (two-call count/fill query).
VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
                                                                       uint32_t *pPresentModeCount,
                                                                       VkPresentModeKHR *pPresentModes) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
    bool skip_call = false;
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
    assert(my_data != NULL);

    skip_call |= parameter_validation_vkGetPhysicalDeviceSurfacePresentModesKHR(my_data->report_data, surface, pPresentModeCount,
                                                                                pPresentModes);

    if (!skip_call) {
        result = get_dispatch_table(pc_instance_table_map, physicalDevice)
                     ->GetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, pPresentModes);

        validate_result(my_data->report_data, "vkGetPhysicalDeviceSurfacePresentModesKHR", result);
    }

    return result;
}

#ifdef VK_USE_PLATFORM_WIN32_KHR
// Intercepts vkCreateWin32SurfaceKHR. Note: unlike the swapchain entry points
// above, the result is validated even when the call was skipped.
VKAPI_ATTR VkResult VKAPI_CALL CreateWin32SurfaceKHR(VkInstance instance, const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
                                                     const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;

    layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
    assert(my_data != NULL);

    bool skip_call = parameter_validation_vkCreateWin32SurfaceKHR(my_data->report_data, pCreateInfo, pAllocator, pSurface);

    if (!skip_call) {
        result =
            get_dispatch_table(pc_instance_table_map, instance)->CreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
    }

    validate_result(my_data->report_data, "vkCreateWin32SurfaceKHR", result);

    return result;
}
#endif // VK_USE_PLATFORM_WIN32_KHR

#ifdef VK_USE_PLATFORM_XCB_KHR
// Intercepts vkCreateXcbSurfaceKHR.
VKAPI_ATTR VkResult VKAPI_CALL CreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
                                                   const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
    VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;

    layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
    assert(my_data != NULL);

    bool skip_call = parameter_validation_vkCreateXcbSurfaceKHR(my_data->report_data, pCreateInfo, pAllocator, pSurface);

    if (!skip_call) {
        result =
            get_dispatch_table(pc_instance_table_map, instance)->CreateXcbSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
    }

    validate_result(my_data->report_data, "vkCreateXcbSurfaceKHR", result);

    return result;
}

// Intercepts vkGetPhysicalDeviceXcbPresentationSupportKHR; returns VK_FALSE when
// validation fails and the driver is never asked.
VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice,
                                                                          uint32_t queueFamilyIndex, xcb_connection_t *connection,
                                                                          xcb_visualid_t visual_id) {
    VkBool32 result = false;

    layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
    assert(my_data != NULL);

    bool skip_call = parameter_validation_vkGetPhysicalDeviceXcbPresentationSupportKHR(my_data->report_data, queueFamilyIndex,
                                                                                       connection, visual_id);

    if (!skip_call) {
        result = get_dispatch_table(pc_instance_table_map, physicalDevice)
                     ->GetPhysicalDeviceXcbPresentationSupportKHR(physicalDevice, queueFamilyIndex, connection, visual_id);
4858 } 4859 4860 return result; 4861} 4862#endif // VK_USE_PLATFORM_XCB_KHR 4863 4864#ifdef VK_USE_PLATFORM_XLIB_KHR 4865VKAPI_ATTR VkResult VKAPI_CALL CreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo, 4866 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) { 4867 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 4868 4869 layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map); 4870 assert(my_data != NULL); 4871 4872 bool skip_call = parameter_validation_vkCreateXlibSurfaceKHR(my_data->report_data, pCreateInfo, pAllocator, pSurface); 4873 4874 if (!skip_call) { 4875 result = 4876 get_dispatch_table(pc_instance_table_map, instance)->CreateXlibSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface); 4877 } 4878 4879 validate_result(my_data->report_data, "vkCreateXlibSurfaceKHR", result); 4880 4881 return result; 4882} 4883 4884VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceXlibPresentationSupportKHR(VkPhysicalDevice physicalDevice, 4885 uint32_t queueFamilyIndex, Display *dpy, 4886 VisualID visualID) { 4887 VkBool32 result = false; 4888 4889 layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map); 4890 assert(my_data != NULL); 4891 4892 bool skip_call = 4893 parameter_validation_vkGetPhysicalDeviceXlibPresentationSupportKHR(my_data->report_data, queueFamilyIndex, dpy, visualID); 4894 4895 if (!skip_call) { 4896 result = get_dispatch_table(pc_instance_table_map, physicalDevice) 4897 ->GetPhysicalDeviceXlibPresentationSupportKHR(physicalDevice, queueFamilyIndex, dpy, visualID); 4898 } 4899} 4900#endif // VK_USE_PLATFORM_XLIB_KHR 4901 4902#ifdef VK_USE_PLATFORM_MIR_KHR 4903VKAPI_ATTR VkResult VKAPI_CALL CreateMirSurfaceKHR(VkInstance instance, const VkMirSurfaceCreateInfoKHR *pCreateInfo, 4904 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) { 4905 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 4906 4907 layer_data *my_data = 
get_my_data_ptr(get_dispatch_key(instance), layer_data_map); 4908 assert(my_data != NULL); 4909 4910 bool skip_call = parameter_validation_vkCreateMirSurfaceKHR(my_data->report_data, pCreateInfo, pAllocator, pSurface); 4911 4912 if (!skip_call) { 4913 result = 4914 get_dispatch_table(pc_instance_table_map, instance)->CreateMirSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface); 4915 } 4916 4917 validate_result(my_data->report_data, "vkCreateMirSurfaceKHR", result); 4918 4919 return result; 4920} 4921 4922VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceMirPresentationSupportKHR(VkPhysicalDevice physicalDevice, 4923 uint32_t queueFamilyIndex, MirConnection *connection) { 4924 VkBool32 result = false; 4925 4926 layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map); 4927 assert(my_data != NULL); 4928 4929 bool skip_call = 4930 parameter_validation_vkGetPhysicalDeviceMirPresentationSupportKHR(my_data->report_data, queueFamilyIndex, connection); 4931 4932 if (!skip_call) { 4933 result = get_dispatch_table(pc_instance_table_map, physicalDevice) 4934 ->GetPhysicalDeviceMirPresentationSupportKHR(physicalDevice, queueFamilyIndex, connection); 4935 } 4936} 4937#endif // VK_USE_PLATFORM_MIR_KHR 4938 4939#ifdef VK_USE_PLATFORM_WAYLAND_KHR 4940VKAPI_ATTR VkResult VKAPI_CALL CreateWaylandSurfaceKHR(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR *pCreateInfo, 4941 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) { 4942 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 4943 4944 layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map); 4945 assert(my_data != NULL); 4946 4947 bool skip_call = parameter_validation_vkCreateWaylandSurfaceKHR(my_data->report_data, pCreateInfo, pAllocator, pSurface); 4948 4949 if (!skip_call) { 4950 result = get_dispatch_table(pc_instance_table_map, instance) 4951 ->CreateWaylandSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface); 4952 } 4953 4954 
validate_result(my_data->report_data, "vkCreateWaylandSurfaceKHR", result); 4955 4956 return result; 4957} 4958 4959VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice, 4960 uint32_t queueFamilyIndex, 4961 struct wl_display *display) { 4962 VkBool32 result = false; 4963 4964 layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map); 4965 assert(my_data != NULL); 4966 4967 bool skip_call = 4968 parameter_validation_vkGetPhysicalDeviceWaylandPresentationSupportKHR(my_data->report_data, queueFamilyIndex, display); 4969 4970 if (!skip_call) { 4971 result = get_dispatch_table(pc_instance_table_map, physicalDevice) 4972 ->GetPhysicalDeviceWaylandPresentationSupportKHR(physicalDevice, queueFamilyIndex, display); 4973 } 4974} 4975#endif // VK_USE_PLATFORM_WAYLAND_KHR 4976 4977#ifdef VK_USE_PLATFORM_ANDROID_KHR 4978VKAPI_ATTR VkResult VKAPI_CALL CreateAndroidSurfaceKHR(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR *pCreateInfo, 4979 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) { 4980 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 4981 4982 layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map); 4983 assert(my_data != NULL); 4984 4985 bool skip_call = parameter_validation_vkCreateAndroidSurfaceKHR(my_data->report_data, pCreateInfo, pAllocator, pSurface); 4986 4987 if (!skip_call) { 4988 result = get_dispatch_table(pc_instance_table_map, instance) 4989 ->CreateAndroidSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface); 4990 } 4991 4992 validate_result(my_data->report_data, "vkCreateAndroidSurfaceKHR", result); 4993 4994 return result; 4995} 4996#endif // VK_USE_PLATFORM_ANDROID_KHR 4997 4998static PFN_vkVoidFunction intercept_core_instance_command(const char *name); 4999 5000static PFN_vkVoidFunction intercept_core_device_command(const char *name); 5001 5002static PFN_vkVoidFunction InterceptWsiEnabledCommand(const char 
*name, VkDevice device);

static PFN_vkVoidFunction InterceptWsiEnabledCommand(const char *name, VkInstance instance);

// Layer vkGetDeviceProcAddr: serves this layer's intercepted device and WSI
// entry points first, then falls through to the next layer in the chain.
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(VkDevice device, const char *funcName) {
    assert(device);

    layer_data *data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);

    // A malformed funcName string is itself a validation error; report and bail.
    if (validate_string(data->report_data, "vkGetDeviceProcAddr", "funcName", funcName)) {
        return NULL;
    }

    PFN_vkVoidFunction proc = intercept_core_device_command(funcName);
    if (proc)
        return proc;

    proc = InterceptWsiEnabledCommand(funcName, device);
    if (proc)
        return proc;

    if (get_dispatch_table(pc_device_table_map, device)->GetDeviceProcAddr == NULL)
        return NULL;
    return get_dispatch_table(pc_device_table_map, device)->GetDeviceProcAddr(device, funcName);
}

// Layer vkGetInstanceProcAddr: checks instance commands, device commands, debug
// report, and WSI instance commands before chaining to the next layer. The
// instance-independent lookups run first so they work before `instance` is used.
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(VkInstance instance, const char *funcName) {
    PFN_vkVoidFunction proc = intercept_core_instance_command(funcName);
    if (!proc)
        proc = intercept_core_device_command(funcName);

    if (!proc)
        // Null device: look the name up in the WSI device table without
        // consulting any per-device state.
        proc = InterceptWsiEnabledCommand(funcName, VkDevice(VK_NULL_HANDLE));

    if (proc)
        return proc;

    assert(instance);

    layer_data *data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);

    proc = debug_report_get_instance_proc_addr(data->report_data, funcName);
    if (!proc)
        proc = InterceptWsiEnabledCommand(funcName, instance);

    if (proc)
        return proc;

    if (get_dispatch_table(pc_instance_table_map, instance)->GetInstanceProcAddr == NULL)
        return NULL;
    return get_dispatch_table(pc_instance_table_map, instance)->GetInstanceProcAddr(instance, funcName);
}

// Maps core instance-level entry point names to this layer's intercept
// functions; returns nullptr for names this layer does not intercept.
static PFN_vkVoidFunction intercept_core_instance_command(const char *name) {
    static const struct {
        const char *name;
        PFN_vkVoidFunction proc;
    } core_instance_commands[] = {
        {"vkGetInstanceProcAddr", reinterpret_cast<PFN_vkVoidFunction>(GetInstanceProcAddr)},
        {"vkCreateInstance", reinterpret_cast<PFN_vkVoidFunction>(CreateInstance)},
        {"vkDestroyInstance", reinterpret_cast<PFN_vkVoidFunction>(DestroyInstance)},
        {"vkCreateDevice", reinterpret_cast<PFN_vkVoidFunction>(CreateDevice)},
        {"vkEnumeratePhysicalDevices", reinterpret_cast<PFN_vkVoidFunction>(EnumeratePhysicalDevices)},
        {"vkGetPhysicalDeviceProperties", reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceProperties)},
        {"vkGetPhysicalDeviceFeatures", reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceFeatures)},
        {"vkGetPhysicalDeviceFormatProperties", reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceFormatProperties)},
        {"vkGetPhysicalDeviceImageFormatProperties", reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceImageFormatProperties)},
        {"vkGetPhysicalDeviceSparseImageFormatProperties",
         reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSparseImageFormatProperties)},
        {"vkGetPhysicalDeviceQueueFamilyProperties", reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceQueueFamilyProperties)},
        {"vkGetPhysicalDeviceMemoryProperties", reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceMemoryProperties)},
        {"vkEnumerateInstanceLayerProperties", reinterpret_cast<PFN_vkVoidFunction>(EnumerateInstanceLayerProperties)},
        {"vkEnumerateDeviceLayerProperties", reinterpret_cast<PFN_vkVoidFunction>(EnumerateDeviceLayerProperties)},
        {"vkEnumerateInstanceExtensionProperties", reinterpret_cast<PFN_vkVoidFunction>(EnumerateInstanceExtensionProperties)},
        {"vkEnumerateDeviceExtensionProperties", reinterpret_cast<PFN_vkVoidFunction>(EnumerateDeviceExtensionProperties)},
    };

    for (size_t i = 0; i < ARRAY_SIZE(core_instance_commands); i++) {
        if (!strcmp(core_instance_commands[i].name, name))
            return core_instance_commands[i].proc;
    }

    return
nullptr; 5085} 5086 5087static PFN_vkVoidFunction intercept_core_device_command(const char *name) { 5088 static const struct { 5089 const char *name; 5090 PFN_vkVoidFunction proc; 5091 } core_device_commands[] = { 5092 {"vkGetDeviceProcAddr", reinterpret_cast<PFN_vkVoidFunction>(GetDeviceProcAddr)}, 5093 {"vkDestroyDevice", reinterpret_cast<PFN_vkVoidFunction>(DestroyDevice)}, 5094 {"vkGetDeviceQueue", reinterpret_cast<PFN_vkVoidFunction>(GetDeviceQueue)}, 5095 {"vkQueueSubmit", reinterpret_cast<PFN_vkVoidFunction>(QueueSubmit)}, 5096 {"vkQueueWaitIdle", reinterpret_cast<PFN_vkVoidFunction>(QueueWaitIdle)}, 5097 {"vkDeviceWaitIdle", reinterpret_cast<PFN_vkVoidFunction>(DeviceWaitIdle)}, 5098 {"vkAllocateMemory", reinterpret_cast<PFN_vkVoidFunction>(AllocateMemory)}, 5099 {"vkFreeMemory", reinterpret_cast<PFN_vkVoidFunction>(FreeMemory)}, 5100 {"vkMapMemory", reinterpret_cast<PFN_vkVoidFunction>(MapMemory)}, 5101 {"vkUnmapMemory", reinterpret_cast<PFN_vkVoidFunction>(UnmapMemory)}, 5102 {"vkFlushMappedMemoryRanges", reinterpret_cast<PFN_vkVoidFunction>(FlushMappedMemoryRanges)}, 5103 {"vkInvalidateMappedMemoryRanges", reinterpret_cast<PFN_vkVoidFunction>(InvalidateMappedMemoryRanges)}, 5104 {"vkGetDeviceMemoryCommitment", reinterpret_cast<PFN_vkVoidFunction>(GetDeviceMemoryCommitment)}, 5105 {"vkBindBufferMemory", reinterpret_cast<PFN_vkVoidFunction>(BindBufferMemory)}, 5106 {"vkBindImageMemory", reinterpret_cast<PFN_vkVoidFunction>(BindImageMemory)}, 5107 {"vkCreateFence", reinterpret_cast<PFN_vkVoidFunction>(CreateFence)}, 5108 {"vkDestroyFence", reinterpret_cast<PFN_vkVoidFunction>(DestroyFence)}, 5109 {"vkResetFences", reinterpret_cast<PFN_vkVoidFunction>(ResetFences)}, 5110 {"vkGetFenceStatus", reinterpret_cast<PFN_vkVoidFunction>(GetFenceStatus)}, 5111 {"vkWaitForFences", reinterpret_cast<PFN_vkVoidFunction>(WaitForFences)}, 5112 {"vkCreateSemaphore", reinterpret_cast<PFN_vkVoidFunction>(CreateSemaphore)}, 5113 {"vkDestroySemaphore", 
reinterpret_cast<PFN_vkVoidFunction>(DestroySemaphore)}, 5114 {"vkCreateEvent", reinterpret_cast<PFN_vkVoidFunction>(CreateEvent)}, 5115 {"vkDestroyEvent", reinterpret_cast<PFN_vkVoidFunction>(DestroyEvent)}, 5116 {"vkGetEventStatus", reinterpret_cast<PFN_vkVoidFunction>(GetEventStatus)}, 5117 {"vkSetEvent", reinterpret_cast<PFN_vkVoidFunction>(SetEvent)}, 5118 {"vkResetEvent", reinterpret_cast<PFN_vkVoidFunction>(ResetEvent)}, 5119 {"vkCreateQueryPool", reinterpret_cast<PFN_vkVoidFunction>(CreateQueryPool)}, 5120 {"vkDestroyQueryPool", reinterpret_cast<PFN_vkVoidFunction>(DestroyQueryPool)}, 5121 {"vkGetQueryPoolResults", reinterpret_cast<PFN_vkVoidFunction>(GetQueryPoolResults)}, 5122 {"vkCreateBuffer", reinterpret_cast<PFN_vkVoidFunction>(CreateBuffer)}, 5123 {"vkDestroyBuffer", reinterpret_cast<PFN_vkVoidFunction>(DestroyBuffer)}, 5124 {"vkCreateBufferView", reinterpret_cast<PFN_vkVoidFunction>(CreateBufferView)}, 5125 {"vkDestroyBufferView", reinterpret_cast<PFN_vkVoidFunction>(DestroyBufferView)}, 5126 {"vkCreateImage", reinterpret_cast<PFN_vkVoidFunction>(CreateImage)}, 5127 {"vkDestroyImage", reinterpret_cast<PFN_vkVoidFunction>(DestroyImage)}, 5128 {"vkGetImageSubresourceLayout", reinterpret_cast<PFN_vkVoidFunction>(GetImageSubresourceLayout)}, 5129 {"vkCreateImageView", reinterpret_cast<PFN_vkVoidFunction>(CreateImageView)}, 5130 {"vkDestroyImageView", reinterpret_cast<PFN_vkVoidFunction>(DestroyImageView)}, 5131 {"vkCreateShaderModule", reinterpret_cast<PFN_vkVoidFunction>(CreateShaderModule)}, 5132 {"vkDestroyShaderModule", reinterpret_cast<PFN_vkVoidFunction>(DestroyShaderModule)}, 5133 {"vkCreatePipelineCache", reinterpret_cast<PFN_vkVoidFunction>(CreatePipelineCache)}, 5134 {"vkDestroyPipelineCache", reinterpret_cast<PFN_vkVoidFunction>(DestroyPipelineCache)}, 5135 {"vkGetPipelineCacheData", reinterpret_cast<PFN_vkVoidFunction>(GetPipelineCacheData)}, 5136 {"vkMergePipelineCaches", reinterpret_cast<PFN_vkVoidFunction>(MergePipelineCaches)}, 5137 
{"vkCreateGraphicsPipelines", reinterpret_cast<PFN_vkVoidFunction>(CreateGraphicsPipelines)}, 5138 {"vkCreateComputePipelines", reinterpret_cast<PFN_vkVoidFunction>(CreateComputePipelines)}, 5139 {"vkDestroyPipeline", reinterpret_cast<PFN_vkVoidFunction>(DestroyPipeline)}, 5140 {"vkCreatePipelineLayout", reinterpret_cast<PFN_vkVoidFunction>(CreatePipelineLayout)}, 5141 {"vkDestroyPipelineLayout", reinterpret_cast<PFN_vkVoidFunction>(DestroyPipelineLayout)}, 5142 {"vkCreateSampler", reinterpret_cast<PFN_vkVoidFunction>(CreateSampler)}, 5143 {"vkDestroySampler", reinterpret_cast<PFN_vkVoidFunction>(DestroySampler)}, 5144 {"vkCreateDescriptorSetLayout", reinterpret_cast<PFN_vkVoidFunction>(CreateDescriptorSetLayout)}, 5145 {"vkDestroyDescriptorSetLayout", reinterpret_cast<PFN_vkVoidFunction>(DestroyDescriptorSetLayout)}, 5146 {"vkCreateDescriptorPool", reinterpret_cast<PFN_vkVoidFunction>(CreateDescriptorPool)}, 5147 {"vkDestroyDescriptorPool", reinterpret_cast<PFN_vkVoidFunction>(DestroyDescriptorPool)}, 5148 {"vkResetDescriptorPool", reinterpret_cast<PFN_vkVoidFunction>(ResetDescriptorPool)}, 5149 {"vkAllocateDescriptorSets", reinterpret_cast<PFN_vkVoidFunction>(AllocateDescriptorSets)}, 5150 {"vkFreeDescriptorSets", reinterpret_cast<PFN_vkVoidFunction>(FreeDescriptorSets)}, 5151 {"vkUpdateDescriptorSets", reinterpret_cast<PFN_vkVoidFunction>(UpdateDescriptorSets)}, 5152 {"vkCmdSetViewport", reinterpret_cast<PFN_vkVoidFunction>(CmdSetViewport)}, 5153 {"vkCmdSetScissor", reinterpret_cast<PFN_vkVoidFunction>(CmdSetScissor)}, 5154 {"vkCmdSetLineWidth", reinterpret_cast<PFN_vkVoidFunction>(CmdSetLineWidth)}, 5155 {"vkCmdSetDepthBias", reinterpret_cast<PFN_vkVoidFunction>(CmdSetDepthBias)}, 5156 {"vkCmdSetBlendConstants", reinterpret_cast<PFN_vkVoidFunction>(CmdSetBlendConstants)}, 5157 {"vkCmdSetDepthBounds", reinterpret_cast<PFN_vkVoidFunction>(CmdSetDepthBounds)}, 5158 {"vkCmdSetStencilCompareMask", reinterpret_cast<PFN_vkVoidFunction>(CmdSetStencilCompareMask)}, 5159 
        // Dynamic-state and command-buffer lifecycle commands.
        {"vkCmdSetStencilWriteMask", reinterpret_cast<PFN_vkVoidFunction>(CmdSetStencilWriteMask)},
        {"vkCmdSetStencilReference", reinterpret_cast<PFN_vkVoidFunction>(CmdSetStencilReference)},
        {"vkAllocateCommandBuffers", reinterpret_cast<PFN_vkVoidFunction>(AllocateCommandBuffers)},
        {"vkFreeCommandBuffers", reinterpret_cast<PFN_vkVoidFunction>(FreeCommandBuffers)},
        {"vkBeginCommandBuffer", reinterpret_cast<PFN_vkVoidFunction>(BeginCommandBuffer)},
        {"vkEndCommandBuffer", reinterpret_cast<PFN_vkVoidFunction>(EndCommandBuffer)},
        {"vkResetCommandBuffer", reinterpret_cast<PFN_vkVoidFunction>(ResetCommandBuffer)},
        // Binding commands.
        {"vkCmdBindPipeline", reinterpret_cast<PFN_vkVoidFunction>(CmdBindPipeline)},
        {"vkCmdBindDescriptorSets", reinterpret_cast<PFN_vkVoidFunction>(CmdBindDescriptorSets)},
        {"vkCmdBindVertexBuffers", reinterpret_cast<PFN_vkVoidFunction>(CmdBindVertexBuffers)},
        {"vkCmdBindIndexBuffer", reinterpret_cast<PFN_vkVoidFunction>(CmdBindIndexBuffer)},
        // Draw and dispatch commands.
        {"vkCmdDraw", reinterpret_cast<PFN_vkVoidFunction>(CmdDraw)},
        {"vkCmdDrawIndexed", reinterpret_cast<PFN_vkVoidFunction>(CmdDrawIndexed)},
        {"vkCmdDrawIndirect", reinterpret_cast<PFN_vkVoidFunction>(CmdDrawIndirect)},
        {"vkCmdDrawIndexedIndirect", reinterpret_cast<PFN_vkVoidFunction>(CmdDrawIndexedIndirect)},
        {"vkCmdDispatch", reinterpret_cast<PFN_vkVoidFunction>(CmdDispatch)},
        {"vkCmdDispatchIndirect", reinterpret_cast<PFN_vkVoidFunction>(CmdDispatchIndirect)},
        // Transfer (copy/blit/update/fill) commands.
        {"vkCmdCopyBuffer", reinterpret_cast<PFN_vkVoidFunction>(CmdCopyBuffer)},
        {"vkCmdCopyImage", reinterpret_cast<PFN_vkVoidFunction>(CmdCopyImage)},
        {"vkCmdBlitImage", reinterpret_cast<PFN_vkVoidFunction>(CmdBlitImage)},
        {"vkCmdCopyBufferToImage", reinterpret_cast<PFN_vkVoidFunction>(CmdCopyBufferToImage)},
        {"vkCmdCopyImageToBuffer", reinterpret_cast<PFN_vkVoidFunction>(CmdCopyImageToBuffer)},
        {"vkCmdUpdateBuffer", reinterpret_cast<PFN_vkVoidFunction>(CmdUpdateBuffer)},
        {"vkCmdFillBuffer", reinterpret_cast<PFN_vkVoidFunction>(CmdFillBuffer)},
        // Clear and resolve commands.
        {"vkCmdClearColorImage", reinterpret_cast<PFN_vkVoidFunction>(CmdClearColorImage)},
        {"vkCmdClearDepthStencilImage", reinterpret_cast<PFN_vkVoidFunction>(CmdClearDepthStencilImage)},
        {"vkCmdClearAttachments", reinterpret_cast<PFN_vkVoidFunction>(CmdClearAttachments)},
        {"vkCmdResolveImage", reinterpret_cast<PFN_vkVoidFunction>(CmdResolveImage)},
        // Synchronization commands.
        {"vkCmdSetEvent", reinterpret_cast<PFN_vkVoidFunction>(CmdSetEvent)},
        {"vkCmdResetEvent", reinterpret_cast<PFN_vkVoidFunction>(CmdResetEvent)},
        {"vkCmdWaitEvents", reinterpret_cast<PFN_vkVoidFunction>(CmdWaitEvents)},
        {"vkCmdPipelineBarrier", reinterpret_cast<PFN_vkVoidFunction>(CmdPipelineBarrier)},
        // Query and timestamp commands.
        {"vkCmdBeginQuery", reinterpret_cast<PFN_vkVoidFunction>(CmdBeginQuery)},
        {"vkCmdEndQuery", reinterpret_cast<PFN_vkVoidFunction>(CmdEndQuery)},
        {"vkCmdResetQueryPool", reinterpret_cast<PFN_vkVoidFunction>(CmdResetQueryPool)},
        {"vkCmdWriteTimestamp", reinterpret_cast<PFN_vkVoidFunction>(CmdWriteTimestamp)},
        {"vkCmdCopyQueryPoolResults", reinterpret_cast<PFN_vkVoidFunction>(CmdCopyQueryPoolResults)},
        {"vkCmdPushConstants", reinterpret_cast<PFN_vkVoidFunction>(CmdPushConstants)},
        // Framebuffer / render-pass object commands.
        {"vkCreateFramebuffer", reinterpret_cast<PFN_vkVoidFunction>(CreateFramebuffer)},
        {"vkDestroyFramebuffer", reinterpret_cast<PFN_vkVoidFunction>(DestroyFramebuffer)},
        {"vkCreateRenderPass", reinterpret_cast<PFN_vkVoidFunction>(CreateRenderPass)},
        {"vkDestroyRenderPass", reinterpret_cast<PFN_vkVoidFunction>(DestroyRenderPass)},
        {"vkGetRenderAreaGranularity", reinterpret_cast<PFN_vkVoidFunction>(GetRenderAreaGranularity)},
        // Command-pool commands.
        {"vkCreateCommandPool", reinterpret_cast<PFN_vkVoidFunction>(CreateCommandPool)},
        {"vkDestroyCommandPool", reinterpret_cast<PFN_vkVoidFunction>(DestroyCommandPool)},
        {"vkResetCommandPool", reinterpret_cast<PFN_vkVoidFunction>(ResetCommandPool)},
        // Render-pass recording commands.
        {"vkCmdBeginRenderPass", reinterpret_cast<PFN_vkVoidFunction>(CmdBeginRenderPass)},
        {"vkCmdNextSubpass", reinterpret_cast<PFN_vkVoidFunction>(CmdNextSubpass)},
        {"vkCmdExecuteCommands", reinterpret_cast<PFN_vkVoidFunction>(CmdExecuteCommands)},
        {"vkCmdEndRenderPass", reinterpret_cast<PFN_vkVoidFunction>(CmdEndRenderPass)},
    };

    // Linear scan of the static table; returns the layer's intercept for a
    // core device-level command, or nullptr if the name is not in the table.
    for (size_t i = 0; i < ARRAY_SIZE(core_device_commands); i++) {
        if (!strcmp(core_device_commands[i].name, name))
            return core_device_commands[i].proc;
    }

    return nullptr;
}

// Look up the layer's intercept for a WSI (swapchain) device-level command.
// Returns nullptr when `name` is not a WSI command, or when a device is
// supplied and that device did not enable the swapchain extension
// (layer_data::wsi_enabled).
static PFN_vkVoidFunction InterceptWsiEnabledCommand(const char *name, VkDevice device) {
    static const struct {
        const char *name;
        PFN_vkVoidFunction proc;
    } wsi_device_commands[] = {
        {"vkCreateSwapchainKHR", reinterpret_cast<PFN_vkVoidFunction>(CreateSwapchainKHR)},
        {"vkGetSwapchainImagesKHR", reinterpret_cast<PFN_vkVoidFunction>(GetSwapchainImagesKHR)},
        {"vkAcquireNextImageKHR", reinterpret_cast<PFN_vkVoidFunction>(AcquireNextImageKHR)},
        {"vkQueuePresentKHR", reinterpret_cast<PFN_vkVoidFunction>(QueuePresentKHR)},
    };

    // With a real device handle, honor the per-device extension state; with a
    // null device the check is skipped and any table match is returned.
    if (device) {
        layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
        if (!device_data->wsi_enabled)
            return nullptr;
    }

    for (size_t i = 0; i < ARRAY_SIZE(wsi_device_commands); i++) {
        if (!strcmp(wsi_device_commands[i].name, name))
            return wsi_device_commands[i].proc;
    }

    return nullptr;
}

// Instance-level overload: look up the layer's intercept for a WSI surface
// query command, or (below) a platform-specific surface-creation command
// gated by the corresponding extension having been enabled on the instance.
static PFN_vkVoidFunction InterceptWsiEnabledCommand(const char *name, VkInstance instance) {
    static const struct {
        const char *name;
        PFN_vkVoidFunction proc;
    } wsi_instance_commands[] = {
        {"vkGetPhysicalDeviceSurfaceSupportKHR", reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfaceSupportKHR)},
        {"vkGetPhysicalDeviceSurfaceCapabilitiesKHR",
         reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfaceCapabilitiesKHR)},
        {"vkGetPhysicalDeviceSurfaceFormatsKHR", reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfaceFormatsKHR)},
        {"vkGetPhysicalDeviceSurfacePresentModesKHR",
         reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfacePresentModesKHR)},
    };

    // Expose nothing unless VK_KHR_surface was enabled on this instance.
    // An empty extension map means no instance state has been recorded yet.
    VkLayerInstanceDispatchTable *pTable = get_dispatch_table(pc_instance_table_map, instance);
    if (instance_extension_map.size() == 0 || !instance_extension_map[pTable].wsi_enabled)
        return nullptr;

    for (size_t i = 0; i < ARRAY_SIZE(wsi_instance_commands); i++) {
        if (!strcmp(wsi_instance_commands[i].name, name))
            return wsi_instance_commands[i].proc;
    }

    // Platform surface-creation entry points: compiled in per-platform via
    // VK_USE_PLATFORM_*_KHR, and returned only when the matching
    // platform-surface extension was enabled on the instance.
#ifdef VK_USE_PLATFORM_WIN32_KHR
    if ((instance_extension_map[pTable].win32_enabled == true) && !strcmp("vkCreateWin32SurfaceKHR", name))
        return reinterpret_cast<PFN_vkVoidFunction>(CreateWin32SurfaceKHR);
#endif // VK_USE_PLATFORM_WIN32_KHR
#ifdef VK_USE_PLATFORM_XCB_KHR
    if ((instance_extension_map[pTable].xcb_enabled == true) && !strcmp("vkCreateXcbSurfaceKHR", name))
        return reinterpret_cast<PFN_vkVoidFunction>(CreateXcbSurfaceKHR);
    if ((instance_extension_map[pTable].xcb_enabled == true) && !strcmp("vkGetPhysicalDeviceXcbPresentationSupportKHR", name))
        return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceXcbPresentationSupportKHR);
#endif // VK_USE_PLATFORM_XCB_KHR
#ifdef VK_USE_PLATFORM_XLIB_KHR
    if ((instance_extension_map[pTable].xlib_enabled == true) && !strcmp("vkCreateXlibSurfaceKHR", name))
        return reinterpret_cast<PFN_vkVoidFunction>(CreateXlibSurfaceKHR);
    if ((instance_extension_map[pTable].xlib_enabled == true) && !strcmp("vkGetPhysicalDeviceXlibPresentationSupportKHR", name))
        return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceXlibPresentationSupportKHR);
#endif // VK_USE_PLATFORM_XLIB_KHR
#ifdef VK_USE_PLATFORM_MIR_KHR
    if ((instance_extension_map[pTable].mir_enabled == true) && !strcmp("vkCreateMirSurfaceKHR", name))
        return reinterpret_cast<PFN_vkVoidFunction>(CreateMirSurfaceKHR);
    if ((instance_extension_map[pTable].mir_enabled == true) && !strcmp("vkGetPhysicalDeviceMirPresentationSupportKHR", name))
        return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceMirPresentationSupportKHR);
#endif // VK_USE_PLATFORM_MIR_KHR
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
    if ((instance_extension_map[pTable].wayland_enabled == true) && !strcmp("vkCreateWaylandSurfaceKHR", name))
        return reinterpret_cast<PFN_vkVoidFunction>(CreateWaylandSurfaceKHR);
    if ((instance_extension_map[pTable].wayland_enabled == true) &&
        !strcmp("vkGetPhysicalDeviceWaylandPresentationSupportKHR", name))
        return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceWaylandPresentationSupportKHR);
#endif // VK_USE_PLATFORM_WAYLAND_KHR
#ifdef VK_USE_PLATFORM_ANDROID_KHR
    if ((instance_extension_map[pTable].android_enabled == true) && !strcmp("vkCreateAndroidSurfaceKHR", name))
        return reinterpret_cast<PFN_vkVoidFunction>(CreateAndroidSurfaceKHR);
#endif // VK_USE_PLATFORM_ANDROID_KHR

    return nullptr;
}

} // namespace parameter_validation

// vk_layer_logging.h expects these to be defined

// Exported C-linkage trampolines: forward the debug-report entry points into
// the parameter_validation namespace implementations.
VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT(VkInstance instance,
                                                              const VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator,
                                                              VkDebugReportCallbackEXT *pMsgCallback) {
    return parameter_validation::CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pMsgCallback);
}

VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT msgCallback,
                                                           const VkAllocationCallbacks *pAllocator) {
    parameter_validation::DestroyDebugReportCallbackEXT(instance, msgCallback, pAllocator);
}

VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT(VkInstance
instance, VkDebugReportFlagsEXT flags,
                                                   VkDebugReportObjectTypeEXT objType, uint64_t object, size_t location,
                                                   int32_t msgCode, const char *pLayerPrefix, const char *pMsg) {
    parameter_validation::DebugReportMessageEXT(instance, flags, objType, object, location, msgCode, pLayerPrefix, pMsg);
}

// loader-layer interface v0

// Exported entry points the Vulkan loader resolves directly; each forwards to
// the namespaced implementation.
VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
                                                                                     VkExtensionProperties *pProperties) {
    return parameter_validation::EnumerateInstanceExtensionProperties(pLayerName, pCount, pProperties);
}

VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(uint32_t *pCount,
                                                                                 VkLayerProperties *pProperties) {
    return parameter_validation::EnumerateInstanceLayerProperties(pCount, pProperties);
}

VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount,
                                                                               VkLayerProperties *pProperties) {
    // the layer command handles VK_NULL_HANDLE just fine internally
    // NOTE(review): the assert encodes the expectation that the loader only
    // reaches this export with a null physicalDevice — confirm against the
    // loader-layer interface before relying on it.
    assert(physicalDevice == VK_NULL_HANDLE);
    return parameter_validation::EnumerateDeviceLayerProperties(VK_NULL_HANDLE, pCount, pProperties);
}

VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
                                                                                   const char *pLayerName, uint32_t *pCount,
                                                                                   VkExtensionProperties *pProperties) {
    // the layer command handles VK_NULL_HANDLE just fine internally
    assert(physicalDevice == VK_NULL_HANDLE);
    return parameter_validation::EnumerateDeviceExtensionProperties(VK_NULL_HANDLE, pLayerName, pCount, pProperties);
}

VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice dev, const char *funcName) {
    return parameter_validation::GetDeviceProcAddr(dev, funcName);
}

VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char *funcName) {
    return parameter_validation::GetInstanceProcAddr(instance, funcName);
}