/*
 * Copyright (c) 2015-2016 The Khronos Group Inc.
 * Copyright (c) 2015-2016 Valve Corporation
 * Copyright (c) 2015-2016 LunarG, Inc.
 * Copyright (c) 2015-2016 Google, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Tobin Ehlis <tobine@google.com>
 * Author: Courtney Goeltzenleuchter <courtneygo@google.com>
 * Author: Jon Ashburn <jon@lunarg.com>
 * Author: Mike Stroyan <stroyan@google.com>
 * Author: Tony Barbour <tony@LunarG.com>
 */

#include "vk_loader_platform.h"
#include "vulkan/vulkan.h"

#include <cinttypes>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include <unordered_map>

#include "vk_layer_config.h"
#include "vk_layer_data.h"
#include "vk_layer_logging.h"
#include "vk_layer_table.h"
#include "vulkan/vk_layer.h"

#include "object_tracker.h"

#include "vk_validation_error_messages.h"

namespace object_tracker {

static void InitObjectTracker(layer_data *my_data, const VkAllocationCallbacks *pAllocator) {
    layer_debug_actions(my_data->report_data, my_data->logging_callback, pAllocator, "lunarg_object_tracker");
}

// Add new queue to head of global queue list
static void AddQueueInfo(VkDevice device, uint32_t queue_node_index, VkQueue queue) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    auto queueItem = device_data->queue_info_map.find(queue);
    if (queueItem == device_data->queue_info_map.end()) {
        OT_QUEUE_INFO *p_queue_info = new OT_QUEUE_INFO;
        if (p_queue_info != NULL) {
            memset(p_queue_info, 0, sizeof(OT_QUEUE_INFO));
            p_queue_info->queue = queue;
            p_queue_info->queue_node_index = queue_node_index;
            device_data->queue_info_map[queue] = p_queue_info;
        } else {
            log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
                    reinterpret_cast<uint64_t>(queue), __LINE__, OBJTRACK_INTERNAL_ERROR, LayerName,
                    "ERROR: VK_ERROR_OUT_OF_HOST_MEMORY -- could not allocate memory for Queue Information");
        }
    }
}

// Destroy memRef lists and free all memory
static void DestroyQueueDataStructures(VkDevice device) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);

    for (auto queue_item : device_data->queue_info_map) {
        delete queue_item.second;
    }
    device_data->queue_info_map.clear();

    // Destroy the items in the queue map
    auto queue = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].begin();
    while (queue != device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].end()) {
        uint32_t obj_index = queue->second->object_type;
        assert(device_data->num_total_objects > 0);
        device_data->num_total_objects--;
        assert(device_data->num_objects[obj_index] > 0);
        device_data->num_objects[obj_index]--;
        log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, queue->second->object_type,
                queue->second->handle, __LINE__, OBJTRACK_NONE, LayerName,
                "OBJ_STAT Destroy Queue obj 0x%" PRIxLEAST64 " (%" PRIu64 " total objs remain & %" PRIu64 " Queue objs).",
                queue->second->handle, device_data->num_total_objects, device_data->num_objects[obj_index]);
        delete queue->second;
        queue = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].erase(queue);
    }
}

// Check Queue type flags for selected queue operations
static void ValidateQueueFlags(VkQueue queue, const char *function) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(queue), layer_data_map);
    auto queue_item = device_data->queue_info_map.find(queue);
    if (queue_item != device_data->queue_info_map.end()) {
        OT_QUEUE_INFO *pQueueInfo = queue_item->second;
        if (pQueueInfo != NULL) {
            layer_data *instance_data = get_my_data_ptr(get_dispatch_key(device_data->physical_device), layer_data_map);
            if ((instance_data->queue_family_properties[pQueueInfo->queue_node_index].queueFlags &
                 VK_QUEUE_SPARSE_BINDING_BIT) == 0) {
                log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
                        reinterpret_cast<uint64_t>(queue), __LINE__, OBJTRACK_UNKNOWN_OBJECT, LayerName,
                        "Attempting %s on a non-memory-management capable queue -- VK_QUEUE_SPARSE_BINDING_BIT not set", function);
            }
        }
    }
}

static void AllocateCommandBuffer(VkDevice device, const VkCommandPool command_pool, const VkCommandBuffer command_buffer,
                                  VkDebugReportObjectTypeEXT object_type, VkCommandBufferLevel level) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);

    log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, object_type,
            reinterpret_cast<const uint64_t>(command_buffer), __LINE__, OBJTRACK_NONE, LayerName,
            "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++,
            string_VkDebugReportObjectTypeEXT(object_type), reinterpret_cast<const uint64_t>(command_buffer));

    OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
    pNewObjNode->object_type = object_type;
    pNewObjNode->handle = reinterpret_cast<const uint64_t>(command_buffer);
    pNewObjNode->parent_object = reinterpret_cast<const uint64_t &>(command_pool);
    if (level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
        pNewObjNode->status = OBJSTATUS_COMMAND_BUFFER_SECONDARY;
    } else {
        pNewObjNode->status = OBJSTATUS_NONE;
    }
    device_data->object_map[object_type][reinterpret_cast<const uint64_t>(command_buffer)] = pNewObjNode;
    device_data->num_objects[object_type]++;
    device_data->num_total_objects++;
}

static bool ValidateCommandBuffer(VkDevice device, VkCommandPool command_pool, VkCommandBuffer command_buffer) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    bool skip_call = false;
    uint64_t object_handle = reinterpret_cast<uint64_t>(command_buffer);
    if (device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT].find(object_handle) !=
        device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT].end()) {
        OBJTRACK_NODE *pNode =
            device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT][reinterpret_cast<uint64_t>(command_buffer)];

        if (pNode->parent_object != reinterpret_cast<uint64_t &>(command_pool)) {
            skip_call |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, pNode->object_type, object_handle,
                                 __LINE__,
                                 OBJTRACK_COMMAND_POOL_MISMATCH, LayerName,
                                 "FreeCommandBuffers is attempting to free Command Buffer 0x%" PRIxLEAST64
                                 " belonging to Command Pool 0x%" PRIxLEAST64 " from pool 0x%" PRIxLEAST64 ").",
                                 reinterpret_cast<uint64_t>(command_buffer), pNode->parent_object,
                                 reinterpret_cast<uint64_t &>(command_pool));
        }
    } else {
        skip_call |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, object_handle,
                             __LINE__, OBJTRACK_NONE, LayerName,
                             "Unable to remove command buffer obj 0x%" PRIxLEAST64
                             ". Was it created? Has it already been destroyed?",
                             object_handle);
    }
    return skip_call;
}

static void AllocateDescriptorSet(VkDevice device, VkDescriptorPool descriptor_pool, VkDescriptorSet descriptor_set,
                                  VkDebugReportObjectTypeEXT object_type) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);

    log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, object_type,
            reinterpret_cast<uint64_t &>(descriptor_set), __LINE__, OBJTRACK_NONE, LayerName,
            "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++, object_name[object_type],
            reinterpret_cast<uint64_t &>(descriptor_set));

    OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
    pNewObjNode->object_type = object_type;
    pNewObjNode->status = OBJSTATUS_NONE;
    pNewObjNode->handle = reinterpret_cast<uint64_t &>(descriptor_set);
    pNewObjNode->parent_object = reinterpret_cast<uint64_t &>(descriptor_pool);
    device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT][reinterpret_cast<uint64_t &>(descriptor_set)] =
        pNewObjNode;
    device_data->num_objects[object_type]++;
    device_data->num_total_objects++;
}

static bool ValidateDescriptorSet(VkDevice device, VkDescriptorPool descriptor_pool, VkDescriptorSet descriptor_set) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    bool skip_call = false;
    uint64_t object_handle = reinterpret_cast<uint64_t &>(descriptor_set);
    auto dsItem = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT].find(object_handle);
    if (dsItem != device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT].end()) {
        OBJTRACK_NODE *pNode = dsItem->second;

        if (pNode->parent_object != reinterpret_cast<uint64_t &>(descriptor_pool)) {
            skip_call |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, pNode->object_type, object_handle,
                                 __LINE__, OBJTRACK_DESCRIPTOR_POOL_MISMATCH, LayerName,
                                 "FreeDescriptorSets is attempting to free descriptorSet 0x%" PRIxLEAST64
                                 " belonging to Descriptor Pool 0x%" PRIxLEAST64 " from pool 0x%" PRIxLEAST64 ").",
                                 reinterpret_cast<uint64_t &>(descriptor_set), pNode->parent_object,
                                 reinterpret_cast<uint64_t &>(descriptor_pool));
        }
    } else {
        skip_call |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, object_handle,
                             __LINE__, OBJTRACK_NONE, LayerName,
                             "Unable to remove descriptor set obj 0x%" PRIxLEAST64 ". Was it created?"
                             " Has it already been destroyed?",
                             object_handle);
    }
    return skip_call;
}

static void CreateQueue(VkDevice device, VkQueue vkObj, VkDebugReportObjectTypeEXT object_type) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);

    log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, object_type, reinterpret_cast<uint64_t>(vkObj),
            __LINE__, OBJTRACK_NONE, LayerName, "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64,
            object_track_index++, object_name[object_type], reinterpret_cast<uint64_t>(vkObj));

    OBJTRACK_NODE *p_obj_node = NULL;
    auto queue_item = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].find(reinterpret_cast<uint64_t>(vkObj));
    if (queue_item == device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].end()) {
        p_obj_node = new OBJTRACK_NODE;
        device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT][reinterpret_cast<uint64_t>(vkObj)] = p_obj_node;
        device_data->num_objects[object_type]++;
        device_data->num_total_objects++;
    } else {
        p_obj_node = queue_item->second;
    }
    p_obj_node->object_type = object_type;
    p_obj_node->status = OBJSTATUS_NONE;
    p_obj_node->handle = reinterpret_cast<uint64_t>(vkObj);
}

static void CreateSwapchainImageObject(VkDevice dispatchable_object, VkImage swapchain_image, VkSwapchainKHR swapchain) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);
    log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
            reinterpret_cast<uint64_t &>(swapchain_image), __LINE__, OBJTRACK_NONE, LayerName,
            "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++, "SwapchainImage",
            reinterpret_cast<uint64_t &>(swapchain_image));

    OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
    pNewObjNode->object_type = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT;
    pNewObjNode->status = OBJSTATUS_NONE;
    pNewObjNode->handle = reinterpret_cast<uint64_t &>(swapchain_image);
    pNewObjNode->parent_object = reinterpret_cast<uint64_t &>(swapchain);
    device_data->swapchainImageMap[reinterpret_cast<uint64_t &>(swapchain_image)] = pNewObjNode;
}

// Convert a Vulkan handle to the uint64_t key used by the tracking maps: non-dispatchable
// handles are passed by value, dispatchable handles are opaque pointers.
template <typename T>
uint64_t handle_value(T handle) {
    return reinterpret_cast<uint64_t &>(handle);
}
template <typename T>
uint64_t handle_value(T *handle) {
    return reinterpret_cast<uint64_t>(handle);
}

// Record a newly created object in the per-type tracking map, noting whether a custom allocator was used.
template <typename T1, typename T2>
static void CreateObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type,
                         const VkAllocationCallbacks *pAllocator) {
    layer_data *instance_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);

    auto object_handle = handle_value(object);
    bool custom_allocator = pAllocator != nullptr;

    log_msg(instance_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, object_type, object_handle,
            __LINE__, OBJTRACK_NONE, LayerName, "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64,
            object_track_index++, object_name[object_type], object_handle);

    OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
    pNewObjNode->object_type = object_type;
    pNewObjNode->status = custom_allocator ?
                                             OBJSTATUS_CUSTOM_ALLOCATOR : OBJSTATUS_NONE;
    pNewObjNode->handle = object_handle;
    instance_data->object_map[object_type][object_handle] = pNewObjNode;
    instance_data->num_objects[object_type]++;
    instance_data->num_total_objects++;
}

// Remove an object from the tracker, flagging allocator mismatches and unknown handles.
template <typename T1, typename T2>
static void DestroyObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type,
                          const VkAllocationCallbacks *pAllocator) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);

    auto object_handle = handle_value(object);
    bool custom_allocator = pAllocator != nullptr;

    auto item = device_data->object_map[object_type].find(object_handle);
    if (item != device_data->object_map[object_type].end()) {
        OBJTRACK_NODE *pNode = item->second;
        assert(device_data->num_total_objects > 0);
        device_data->num_total_objects--;
        assert(device_data->num_objects[pNode->object_type] > 0);
        device_data->num_objects[pNode->object_type]--;

        log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, pNode->object_type, object_handle, __LINE__,
                OBJTRACK_NONE, LayerName,
                "OBJ_STAT Destroy %s obj 0x%" PRIxLEAST64 " (%" PRIu64 " total objs remain & %" PRIu64 " %s objs).",
                object_name[pNode->object_type], reinterpret_cast<uint64_t &>(object), device_data->num_total_objects,
                device_data->num_objects[pNode->object_type], object_name[pNode->object_type]);

        auto allocated_with_custom = (pNode->status & OBJSTATUS_CUSTOM_ALLOCATOR) ? true : false;
        if (custom_allocator ^ allocated_with_custom) {
            log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle, __LINE__,
                    OBJTRACK_ALLOCATOR_MISMATCH, LayerName,
                    "Custom allocator %sspecified while destroying %s obj 0x%" PRIxLEAST64 " but %sspecified at creation",
                    (custom_allocator ? "" : "not "), object_name[object_type], object_handle,
                    (allocated_with_custom ? "" : "not "));
        }

        delete pNode;
        device_data->object_map[object_type].erase(item);
    } else {
        log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, object_handle, __LINE__,
                OBJTRACK_UNKNOWN_OBJECT, LayerName,
                "Unable to remove %s obj 0x%" PRIxLEAST64 ". Was it created? Has it already been destroyed?",
                object_name[object_type], object_handle);
    }
}

// Check that an object is currently tracked; reports an error (with the supplied validation error
// code) for unknown handles. Swapchain images live in their own map and are checked there.
template <typename T1, typename T2>
static bool ValidateObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type, bool null_allowed,
                           int error_code = -1) {
    if (null_allowed && (object == VK_NULL_HANDLE)) {
        return false;
    }
    auto object_handle = handle_value(object);

    layer_data *device_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);
    if (device_data->object_map[object_type].find(object_handle) == device_data->object_map[object_type].end()) {
        // If object is an image, also look for it in the swapchain image map
        if ((object_type != VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT) ||
            (device_data->swapchainImageMap.find(object_handle) == device_data->swapchainImageMap.end())) {
            const char *error_msg = (error_code == -1) ? "" : validation_error_map[error_code];
            return log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle, __LINE__,
                           error_code, LayerName, "Invalid %s Object 0x%" PRIxLEAST64 "."
                           " %s", object_name[object_type],
                           object_handle, error_msg);
        }
    }
    return false;
}

static void DeviceReportUndestroyedObjects(VkDevice device, VkDebugReportObjectTypeEXT object_type) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    for (auto item = device_data->object_map[object_type].begin(); item != device_data->object_map[object_type].end();) {
        OBJTRACK_NODE *object_info = item->second;
        log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_info->object_type, object_info->handle, __LINE__,
                OBJTRACK_OBJECT_LEAK, LayerName,
                "OBJ ERROR : For device 0x%" PRIxLEAST64 ", %s object 0x%" PRIxLEAST64 " has not been destroyed.",
                reinterpret_cast<uint64_t>(device), object_name[object_type], object_info->handle);
        item = device_data->object_map[object_type].erase(item);
    }
}

VKAPI_ATTR void VKAPI_CALL DestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator) {
    std::unique_lock<std::mutex> lock(global_lock);

    dispatch_key key = get_dispatch_key(instance);
    layer_data *instance_data = get_my_data_ptr(key, layer_data_map);

    // Enable the temporary callback(s) here to catch cleanup issues:
    bool callback_setup = false;
    if (instance_data->num_tmp_callbacks > 0) {
        if (!layer_enable_tmp_callbacks(instance_data->report_data, instance_data->num_tmp_callbacks,
                                        instance_data->tmp_dbg_create_infos, instance_data->tmp_callbacks)) {
            callback_setup = true;
        }
    }

    ValidateObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, true, VALIDATION_ERROR_00021);

    DestroyObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, pAllocator);
    // Report any remaining objects in LL

    for (auto iit = instance_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT].begin();
         iit != instance_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT].end();) {
        OBJTRACK_NODE *pNode = iit->second;

        VkDevice device = reinterpret_cast<VkDevice>(pNode->handle);

        log_msg(instance_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, pNode->object_type, pNode->handle, __LINE__,
                OBJTRACK_OBJECT_LEAK, LayerName, "OBJ ERROR : %s object 0x%" PRIxLEAST64 " has not been destroyed.",
                string_VkDebugReportObjectTypeEXT(pNode->object_type), pNode->handle);
        // Semaphore:
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT);
        DeviceReportUndestroyedObjects(device,
                                       VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT);
        // DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT);
    }
    instance_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT].clear();

    VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, instance);
    pInstanceTable->DestroyInstance(instance, pAllocator);

    // Disable and cleanup the temporary callback(s):
    if (callback_setup) {
        layer_disable_tmp_callbacks(instance_data->report_data, instance_data->num_tmp_callbacks, instance_data->tmp_callbacks);
    }
    if (instance_data->num_tmp_callbacks > 0) {
        layer_free_tmp_callbacks(instance_data->tmp_dbg_create_infos, instance_data->tmp_callbacks);
        instance_data->num_tmp_callbacks = 0;
    }

    // Clean up logging callback, if any
    while (instance_data->logging_callback.size() > 0) {
        VkDebugReportCallbackEXT callback = instance_data->logging_callback.back();
        layer_destroy_msg_callback(instance_data->report_data, callback, pAllocator);
        instance_data->logging_callback.pop_back();
    }

    layer_debug_report_destroy_instance(instance_data->report_data);
    layer_data_map.erase(key);

    instanceExtMap.erase(pInstanceTable);
    lock.unlock();
    ot_instance_table_map.erase(key);
}

VKAPI_ATTR void VKAPI_CALL DestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
    std::unique_lock<std::mutex> lock(global_lock);
    ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, true, VALIDATION_ERROR_00052);
    DestroyObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, pAllocator);

    // Report any remaining objects associated with this VkDevice object in LL
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT);
    // DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT);
    // DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT);
    // DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT);

    // Clean up Queue's MemRef Linked Lists
    DestroyQueueDataStructures(device);

    lock.unlock();

    dispatch_key key = get_dispatch_key(device);
    VkLayerDispatchTable *pDisp = get_dispatch_table(ot_device_table_map, device);
    pDisp->DestroyDevice(device, pAllocator);
    ot_device_table_map.erase(key);
}

VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures *pFeatures) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false,
                                    VALIDATION_ERROR_01679);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_instance_table_map, physicalDevice)->GetPhysicalDeviceFeatures(physicalDevice, pFeatures);
}

VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format,
                                                             VkFormatProperties *pFormatProperties) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false,
                                    VALIDATION_ERROR_01683);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_instance_table_map, physicalDevice)
        ->GetPhysicalDeviceFormatProperties(physicalDevice, format, pFormatProperties);
}

VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format,
                                                                      VkImageType type, VkImageTiling tiling,
                                                                      VkImageUsageFlags usage, VkImageCreateFlags flags,
                                                                      VkImageFormatProperties *pImageFormatProperties) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false,
                                    VALIDATION_ERROR_01686);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result =
        get_dispatch_table(ot_instance_table_map, physicalDevice)
            ->GetPhysicalDeviceImageFormatProperties(physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties);
    return result;
}

VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties *pProperties) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false,
                                    VALIDATION_ERROR_00026);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_instance_table_map, physicalDevice)->GetPhysicalDeviceProperties(physicalDevice, pProperties);
}

VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice,
                                                             VkPhysicalDeviceMemoryProperties *pMemoryProperties) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false,
                                    VALIDATION_ERROR_00609);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_instance_table_map, physicalDevice)
        ->GetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties);
}

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(VkInstance instance, const char *pName);

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(VkDevice device, const char *pName);

VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pPropertyCount,
                                                                    VkExtensionProperties *pProperties);

VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceLayerProperties(uint32_t *pPropertyCount, VkLayerProperties *pProperties);

VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
                                                              VkLayerProperties *pProperties);

VKAPI_ATTR VkResult VKAPI_CALL QueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(queue, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, true, VALIDATION_ERROR_00130);
        if (pSubmits) {
            for (uint32_t idx0 = 0; idx0 < submitCount; ++idx0) {
                if (pSubmits[idx0].pCommandBuffers) {
                    for (uint32_t idx1 = 0; idx1 < pSubmits[idx0].commandBufferCount; ++idx1) {
                        skip_call |= ValidateObject(queue, pSubmits[idx0].pCommandBuffers[idx1],
                                                    VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, VALIDATION_ERROR_00149);
                    }
                }
                if (pSubmits[idx0].pSignalSemaphores) {
                    for (uint32_t idx2 = 0; idx2 < pSubmits[idx0].signalSemaphoreCount; ++idx2) {
                        skip_call |= ValidateObject(queue, pSubmits[idx0].pSignalSemaphores[idx2],
                                                    VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, false, VALIDATION_ERROR_00150);
                    }
                }
                if (pSubmits[idx0].pWaitSemaphores) {
                    for (uint32_t idx3 = 0; idx3 < pSubmits[idx0].waitSemaphoreCount; ++idx3) {
                        skip_call |= ValidateObject(queue, pSubmits[idx0].pWaitSemaphores[idx3],
                                                    VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, false, VALIDATION_ERROR_00146);
                    }
                }
            }
        }
        if (queue) {
            skip_call |= ValidateObject(queue, queue, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, false, VALIDATION_ERROR_00128);
        }
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, queue)->QueueSubmit(queue, submitCount, pSubmits, fence);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL QueueWaitIdle(VkQueue queue) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(queue, queue, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, false, VALIDATION_ERROR_00317);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, queue)->QueueWaitIdle(queue);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL DeviceWaitIdle(VkDevice device) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00318);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->DeviceWaitIdle(device);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL AllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
                                              const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00612);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->AllocateMemory(device, pAllocateInfo, pAllocator, pMemory);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (result == VK_SUCCESS) {
            CreateObject(device, *pMemory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, pAllocator);
        }
    }
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL FlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount,
                                                       const VkMappedMemoryRange *pMemoryRanges) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00635);
        if (pMemoryRanges) {
            for (uint32_t idx0 = 0; idx0 < memoryRangeCount; ++idx0) {
                if (pMemoryRanges[idx0].memory) {
                    skip_call |= ValidateObject(device, pMemoryRanges[idx0].memory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
                                                false, VALIDATION_ERROR_00648);
                }
            }
        }
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result =
        get_dispatch_table(ot_device_table_map, device)->FlushMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL InvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount,
                                                            const VkMappedMemoryRange *pMemoryRanges) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00638);
        if (pMemoryRanges) {
            for (uint32_t idx0 = 0; idx0 < memoryRangeCount; ++idx0) {
                if (pMemoryRanges[idx0].memory) {
                    skip_call |= ValidateObject(device, pMemoryRanges[idx0].memory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
                                                false, VALIDATION_ERROR_00648);
                }
            }
        }
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result =
        get_dispatch_table(ot_device_table_map, device)->InvalidateMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
    return result;
}

VKAPI_ATTR void VKAPI_CALL GetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory,
                                                     VkDeviceSize *pCommittedMemoryInBytes) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device,
                                    device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00654);
        skip_call |= ValidateObject(device, memory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false, VALIDATION_ERROR_00655);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_device_table_map, device)->GetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes);
}

VKAPI_ATTR VkResult VKAPI_CALL BindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory memory,
                                                VkDeviceSize memoryOffset) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false, VALIDATION_ERROR_00799);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00798);
        skip_call |= ValidateObject(device, memory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false, VALIDATION_ERROR_00800);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->BindBufferMemory(device, buffer, memory, memoryOffset);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL BindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00807);
        skip_call |= ValidateObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false, VALIDATION_ERROR_00808);
        skip_call |= ValidateObject(device, memory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false, VALIDATION_ERROR_00809);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->BindImageMemory(device, image, memory, memoryOffset);
    return result;
}

VKAPI_ATTR void VKAPI_CALL GetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
                                                       VkMemoryRequirements *pMemoryRequirements) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false, VALIDATION_ERROR_00784);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00783);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_device_table_map, device)->GetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
}

VKAPI_ATTR void VKAPI_CALL GetImageMemoryRequirements(VkDevice device, VkImage image, VkMemoryRequirements *pMemoryRequirements) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00787);
        skip_call |= ValidateObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false, VALIDATION_ERROR_00788);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_device_table_map, device)->GetImageMemoryRequirements(device, image, pMemoryRequirements);
}

VKAPI_ATTR void VKAPI_CALL GetImageSparseMemoryRequirements(VkDevice device, VkImage image,
                                                            uint32_t *pSparseMemoryRequirementCount,
                                                            VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex>
            lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_01610);
        skip_call |= ValidateObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false, VALIDATION_ERROR_01611);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_device_table_map, device)
        ->GetImageSparseMemoryRequirements(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
}

VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format,
                                                                        VkImageType type, VkSampleCountFlagBits samples,
                                                                        VkImageUsageFlags usage, VkImageTiling tiling,
                                                                        uint32_t *pPropertyCount,
                                                                        VkSparseImageFormatProperties *pProperties) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false,
                                    VALIDATION_ERROR_01601);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_instance_table_map, physicalDevice)
        ->GetPhysicalDeviceSparseImageFormatProperties(physicalDevice, format, type, samples, usage, tiling, pPropertyCount,
                                                       pProperties);
}

VKAPI_ATTR VkResult VKAPI_CALL CreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
                                           const VkAllocationCallbacks *pAllocator, VkFence *pFence) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00166);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateFence(device, pCreateInfo, pAllocator, pFence);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (result == VK_SUCCESS) {
            CreateObject(device, *pFence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, pAllocator);
        }
    }
    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00176);
        skip_call |= ValidateObject(device, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, true, VALIDATION_ERROR_00177);
    }
    if (skip_call) {
        return;
    }
    {
        std::lock_guard<std::mutex> lock(global_lock);
        DestroyObject(device, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, pAllocator);
    }
    get_dispatch_table(ot_device_table_map, device)->DestroyFence(device, fence, pAllocator);
}

VKAPI_ATTR VkResult VKAPI_CALL ResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00184);
        if (pFences) {
            for (uint32_t idx0 = 0; idx0 < fenceCount; ++idx0) {
                skip_call |=
                    ValidateObject(device, pFences[idx0], VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, false, VALIDATION_ERROR_00187);
            }
        }
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->ResetFences(device, fenceCount, pFences);
    return result;
}
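
// Note: the entry points that follow all use the same object_tracker hook pattern seen above:
// validate the handles involved while holding global_lock, bail out early (returning
// VK_ERROR_VALIDATION_FAILED_EXT where a VkResult is expected) if validation fails, forward the
// call through the ot_device_table_map dispatch table, and, for Create*/Allocate* entry points,
// record the new handle via CreateObject when the downstream call returns VK_SUCCESS.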
VKAPI_ATTR VkResult VKAPI_CALL GetFenceStatus(VkDevice device, VkFence fence) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00180);
        skip_call |= ValidateObject(device, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, false, VALIDATION_ERROR_00181);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->GetFenceStatus(device, fence);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL WaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences, VkBool32 waitAll,
                                             uint64_t timeout) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00188);
        if (pFences) {
            for (uint32_t idx0 = 0; idx0 < fenceCount; ++idx0) {
                skip_call |=
                    ValidateObject(device, pFences[idx0], VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, false, VALIDATION_ERROR_00191);
            }
        }
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result =
        get_dispatch_table(ot_device_table_map, device)->WaitForFences(device, fenceCount, pFences, waitAll, timeout);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL CreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
                                               const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00192);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result =
        get_dispatch_table(ot_device_table_map, device)->CreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (result == VK_SUCCESS) {
            CreateObject(device, *pSemaphore, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, pAllocator);
        }
    }
    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks *pAllocator) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00202);
        skip_call |= ValidateObject(device, semaphore, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, true, VALIDATION_ERROR_00203);
    }
    if (skip_call) {
        return;
    }
    {
        std::lock_guard<std::mutex> lock(global_lock);
        DestroyObject(device, semaphore, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, pAllocator);
    }
    get_dispatch_table(ot_device_table_map, device)->DestroySemaphore(device, semaphore, pAllocator);
}

VKAPI_ATTR VkResult VKAPI_CALL CreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
                                           const VkAllocationCallbacks *pAllocator, VkEvent *pEvent) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00206);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateEvent(device,
                                                                                   pCreateInfo, pAllocator, pEvent);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (result == VK_SUCCESS) {
            CreateObject(device, *pEvent, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, pAllocator);
        }
    }
    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00216);
        skip_call |= ValidateObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, true, VALIDATION_ERROR_00217);
    }
    if (skip_call) {
        return;
    }
    {
        std::lock_guard<std::mutex> lock(global_lock);
        DestroyObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, pAllocator);
    }
    get_dispatch_table(ot_device_table_map, device)->DestroyEvent(device, event, pAllocator);
}

VKAPI_ATTR VkResult VKAPI_CALL GetEventStatus(VkDevice device, VkEvent event) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00220);
        skip_call |= ValidateObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false, VALIDATION_ERROR_00221);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->GetEventStatus(device, event);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL SetEvent(VkDevice device, VkEvent event) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00223);
        skip_call |= ValidateObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false, VALIDATION_ERROR_00224);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->SetEvent(device, event);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL ResetEvent(VkDevice device, VkEvent event) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00227);
        skip_call |= ValidateObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false, VALIDATION_ERROR_00228);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->ResetEvent(device, event);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL CreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
                                               const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_01002);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result =
        get_dispatch_table(ot_device_table_map, device)->CreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (result == VK_SUCCESS) {
            CreateObject(device, *pQueryPool,
                         VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, pAllocator);
        }
    }
    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks *pAllocator) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_01015);
        skip_call |= ValidateObject(device, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, true, VALIDATION_ERROR_01016);
    }
    if (skip_call) {
        return;
    }
    {
        std::lock_guard<std::mutex> lock(global_lock);
        DestroyObject(device, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, pAllocator);
    }
    get_dispatch_table(ot_device_table_map, device)->DestroyQueryPool(device, queryPool, pAllocator);
}

VKAPI_ATTR VkResult VKAPI_CALL GetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
                                                   uint32_t queryCount, size_t dataSize, void *pData, VkDeviceSize stride,
                                                   VkQueryResultFlags flags) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_01054);
        skip_call |= ValidateObject(device, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false, VALIDATION_ERROR_01055);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)
                          ->GetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL CreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
                                            const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00659);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (result == VK_SUCCESS) {
            CreateObject(device, *pBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, pAllocator);
        }
    }
    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, true, VALIDATION_ERROR_00680);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00679);
    }
    if (skip_call) {
        return;
    }
    {
        std::lock_guard<std::mutex> lock(global_lock);
        DestroyObject(device, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, pAllocator);
    }
    get_dispatch_table(ot_device_table_map, device)->DestroyBuffer(device, buffer, pAllocator);
}

VKAPI_ATTR VkResult VKAPI_CALL CreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
                                                const VkAllocationCallbacks *pAllocator, VkBufferView *pView) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00683);
        if (pCreateInfo) {
            skip_call |=
                ValidateObject(device, pCreateInfo->buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false, VALIDATION_ERROR_00699);
        }
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateBufferView(device, pCreateInfo, pAllocator, pView);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (result == VK_SUCCESS) {
            CreateObject(device, *pView, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, pAllocator);
        }
    }
    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks *pAllocator) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, bufferView, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, true, VALIDATION_ERROR_00705);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00704);
    }
    if (skip_call) {
        return;
    }
    {
        std::lock_guard<std::mutex> lock(global_lock);
        DestroyObject(device, bufferView, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, pAllocator);
    }
    get_dispatch_table(ot_device_table_map, device)->DestroyBufferView(device, bufferView, pAllocator);
}

VKAPI_ATTR VkResult VKAPI_CALL CreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
                                           const VkAllocationCallbacks *pAllocator, VkImage *pImage) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00709);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateImage(device, pCreateInfo, pAllocator, pImage);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (result == VK_SUCCESS) {
            CreateObject(device, *pImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, pAllocator);
        }
    }
    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00746);
        skip_call |= ValidateObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, true, VALIDATION_ERROR_00747);
    }
    if (skip_call) {
        return;
    }
    {
        std::lock_guard<std::mutex> lock(global_lock);
        DestroyObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, pAllocator);
    }
    get_dispatch_table(ot_device_table_map, device)->DestroyImage(device, image, pAllocator);
}

VKAPI_ATTR void VKAPI_CALL GetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource *pSubresource,
                                                     VkSubresourceLayout *pLayout) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00734);
        skip_call |= ValidateObject(device,
                                    image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false, VALIDATION_ERROR_00735);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_device_table_map, device)->GetImageSubresourceLayout(device, image, pSubresource, pLayout);
}

VKAPI_ATTR VkResult VKAPI_CALL CreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
                                               const VkAllocationCallbacks *pAllocator, VkImageView *pView) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00750);
        if (pCreateInfo) {
            skip_call |=
                ValidateObject(device, pCreateInfo->image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false, VALIDATION_ERROR_00763);
        }
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateImageView(device, pCreateInfo, pAllocator, pView);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (result == VK_SUCCESS) {
            CreateObject(device, *pView, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, pAllocator);
        }
    }
    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks *pAllocator) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00779);
        skip_call |= ValidateObject(device, imageView, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, true, VALIDATION_ERROR_00780);
    }
    if (skip_call) {
        return;
    }
    {
        std::lock_guard<std::mutex> lock(global_lock);
        DestroyObject(device, imageView, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, pAllocator);
    }
    get_dispatch_table(ot_device_table_map, device)->DestroyImageView(device, imageView, pAllocator);
}

VKAPI_ATTR VkResult VKAPI_CALL CreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
                                                  const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00466);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result =
        get_dispatch_table(ot_device_table_map, device)->CreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (result == VK_SUCCESS) {
            CreateObject(device, *pShaderModule, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, pAllocator);
        }
    }
    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
                                               const VkAllocationCallbacks *pAllocator) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00481);
        skip_call |=
            ValidateObject(device, shaderModule, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, true, VALIDATION_ERROR_00482);
    }
    if (skip_call) {
        return;
    }
    {
        std::lock_guard<std::mutex> lock(global_lock);
        DestroyObject(device, shaderModule,
VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, pAllocator); 1263 } 1264 get_dispatch_table(ot_device_table_map, device)->DestroyShaderModule(device, shaderModule, pAllocator); 1265} 1266 1267VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineCache(VkDevice device, const VkPipelineCacheCreateInfo *pCreateInfo, 1268 const VkAllocationCallbacks *pAllocator, VkPipelineCache *pPipelineCache) { 1269 bool skip_call = false; 1270 { 1271 std::lock_guard<std::mutex> lock(global_lock); 1272 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00562); 1273 } 1274 if (skip_call) { 1275 return VK_ERROR_VALIDATION_FAILED_EXT; 1276 } 1277 VkResult result = 1278 get_dispatch_table(ot_device_table_map, device)->CreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache); 1279 { 1280 std::lock_guard<std::mutex> lock(global_lock); 1281 if (result == VK_SUCCESS) { 1282 CreateObject(device, *pPipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, pAllocator); 1283 } 1284 } 1285 return result; 1286} 1287 1288VKAPI_ATTR void VKAPI_CALL DestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache, 1289 const VkAllocationCallbacks *pAllocator) { 1290 bool skip_call = false; 1291 { 1292 std::lock_guard<std::mutex> lock(global_lock); 1293 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00585); 1294 skip_call |= 1295 ValidateObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, true, VALIDATION_ERROR_00586); 1296 } 1297 if (skip_call) { 1298 return; 1299 } 1300 { 1301 std::lock_guard<std::mutex> lock(global_lock); 1302 DestroyObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, pAllocator); 1303 } 1304 get_dispatch_table(ot_device_table_map, device)->DestroyPipelineCache(device, pipelineCache, pAllocator); 1305} 1306 1307VKAPI_ATTR VkResult VKAPI_CALL GetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t *pDataSize, 1308 void *pData) { 1309 bool skip_call = false; 1310 { 1311 std::lock_guard<std::mutex> lock(global_lock); 1312 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00578); 1313 skip_call |= 1314 ValidateObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false, VALIDATION_ERROR_00579); 1315 } 1316 if (skip_call) { 1317 return VK_ERROR_VALIDATION_FAILED_EXT; 1318 } 1319 VkResult result = 1320 get_dispatch_table(ot_device_table_map, device)->GetPipelineCacheData(device, pipelineCache, pDataSize, pData); 1321 return result; 1322} 1323 1324VKAPI_ATTR VkResult VKAPI_CALL MergePipelineCaches(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, 1325 const VkPipelineCache *pSrcCaches) { 1326 bool skip_call = false; 1327 { 1328 std::lock_guard<std::mutex> lock(global_lock); 1329 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00572); 1330 skip_call |= 1331 ValidateObject(device, dstCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false, VALIDATION_ERROR_00573); 1332 if (pSrcCaches) { 1333 for (uint32_t idx0 = 0; idx0 < srcCacheCount; ++idx0) { 1334 skip_call |= ValidateObject(device, pSrcCaches[idx0], VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false, 1335 VALIDATION_ERROR_00577); 1336 } 1337 } 1338 } 1339 if (skip_call) { 1340 return VK_ERROR_VALIDATION_FAILED_EXT; 1341 } 1342 VkResult result = 1343 get_dispatch_table(ot_device_table_map, 
device)->MergePipelineCaches(device, dstCache, srcCacheCount, pSrcCaches); 1344 return result; 1345} 1346 1347VKAPI_ATTR void VKAPI_CALL DestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks *pAllocator) { 1348 bool skip_call = false; 1349 { 1350 std::lock_guard<std::mutex> lock(global_lock); 1351 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00558); 1352 skip_call |= ValidateObject(device, pipeline, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, true, VALIDATION_ERROR_00559); 1353 } 1354 if (skip_call) { 1355 return; 1356 } 1357 { 1358 std::lock_guard<std::mutex> lock(global_lock); 1359 DestroyObject(device, pipeline, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, pAllocator); 1360 } 1361 get_dispatch_table(ot_device_table_map, device)->DestroyPipeline(device, pipeline, pAllocator); 1362} 1363 1364VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo, 1365 const VkAllocationCallbacks *pAllocator, VkPipelineLayout *pPipelineLayout) { 1366 bool skip_call = false; 1367 { 1368 std::lock_guard<std::mutex> lock(global_lock); 1369 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00861); 1370 if (pCreateInfo) { 1371 if (pCreateInfo->pSetLayouts) { 1372 for (uint32_t idx0 = 0; idx0 < pCreateInfo->setLayoutCount; ++idx0) { 1373 skip_call |= 1374 ValidateObject(device, pCreateInfo->pSetLayouts[idx0], 1375 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, false, VALIDATION_ERROR_00875); 1376 } 1377 } 1378 } 1379 } 1380 if (skip_call) { 1381 return VK_ERROR_VALIDATION_FAILED_EXT; 1382 } 1383 VkResult result = 1384 get_dispatch_table(ot_device_table_map, device)->CreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout); 1385 { 1386 std::lock_guard<std::mutex> lock(global_lock); 1387 if (result == VK_SUCCESS) { 1388 CreateObject(device, *pPipelineLayout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, pAllocator); 1389 } 1390 } 1391 return result; 1392} 1393 1394VKAPI_ATTR void VKAPI_CALL DestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, 1395 const VkAllocationCallbacks *pAllocator) { 1396 bool skip_call = false; 1397 { 1398 std::lock_guard<std::mutex> lock(global_lock); 1399 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00885); 1400 skip_call |= 1401 ValidateObject(device, pipelineLayout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, true, VALIDATION_ERROR_00886); 1402 } 1403 if (skip_call) { 1404 return; 1405 } 1406 { 1407 std::lock_guard<std::mutex> lock(global_lock); 1408 DestroyObject(device, pipelineLayout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, pAllocator); 1409 } 1410 get_dispatch_table(ot_device_table_map, device)->DestroyPipelineLayout(device, pipelineLayout, pAllocator); 1411} 1412 1413VKAPI_ATTR VkResult VKAPI_CALL CreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo, 1414 const VkAllocationCallbacks *pAllocator, VkSampler *pSampler) { 1415 bool skip_call = false; 1416 { 1417 std::lock_guard<std::mutex> lock(global_lock); 1418 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00812); 1419 } 1420 if (skip_call) { 1421 return VK_ERROR_VALIDATION_FAILED_EXT; 1422 } 1423 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateSampler(device, pCreateInfo, pAllocator, pSampler); 1424 { 1425 
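    // On success, record the new sampler in the object tracker's map (under the global lock) so later uses of the handle can be validated.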
std::lock_guard<std::mutex> lock(global_lock); 1426 if (result == VK_SUCCESS) { 1427 CreateObject(device, *pSampler, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, pAllocator); 1428 } 1429 } 1430 return result; 1431} 1432 1433VKAPI_ATTR void VKAPI_CALL DestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks *pAllocator) { 1434 bool skip_call = false; 1435 { 1436 std::lock_guard<std::mutex> lock(global_lock); 1437 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00840); 1438 skip_call |= ValidateObject(device, sampler, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, true, VALIDATION_ERROR_00841); 1439 } 1440 if (skip_call) { 1441 return; 1442 } 1443 { 1444 std::lock_guard<std::mutex> lock(global_lock); 1445 DestroyObject(device, sampler, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, pAllocator); 1446 } 1447 get_dispatch_table(ot_device_table_map, device)->DestroySampler(device, sampler, pAllocator); 1448} 1449 1450VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo, 1451 const VkAllocationCallbacks *pAllocator, 1452 VkDescriptorSetLayout *pSetLayout) { 1453 bool skip_call = false; 1454 { 1455 std::lock_guard<std::mutex> lock(global_lock); 1456 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00844); 1457 if (pCreateInfo) { 1458 if (pCreateInfo->pBindings) { 1459 for (uint32_t idx0 = 0; idx0 < pCreateInfo->bindingCount; ++idx0) { 1460 if ((pCreateInfo->pBindings[idx0].descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) || 1461 (pCreateInfo->pBindings[idx0].descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)) { 1462 if (pCreateInfo->pBindings[idx0].pImmutableSamplers) { 1463 for (uint32_t idx1 = 0; idx1 < pCreateInfo->pBindings[idx0].descriptorCount; ++idx1) { 1464 skip_call |= ValidateObject(device, pCreateInfo->pBindings[idx0].pImmutableSamplers[idx1], 1465 VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, false, VALIDATION_ERROR_00852); 1466 } 1467 } 1468 } 1469 } 1470 } 1471 } 1472 } 1473 if (skip_call) { 1474 return VK_ERROR_VALIDATION_FAILED_EXT; 1475 } 1476 VkResult result = 1477 get_dispatch_table(ot_device_table_map, device)->CreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout); 1478 { 1479 std::lock_guard<std::mutex> lock(global_lock); 1480 if (result == VK_SUCCESS) { 1481 CreateObject(device, *pSetLayout, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, pAllocator); 1482 } 1483 } 1484 return result; 1485} 1486 1487VKAPI_ATTR void VKAPI_CALL DestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, 1488 const VkAllocationCallbacks *pAllocator) { 1489 bool skip_call = false; 1490 { 1491 std::lock_guard<std::mutex> lock(global_lock); 1492 skip_call |= ValidateObject(device, descriptorSetLayout, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, true, 1493 VALIDATION_ERROR_00858); 1494 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00857); 1495 } 1496 if (skip_call) { 1497 return; 1498 } 1499 { 1500 std::lock_guard<std::mutex> lock(global_lock); 1501 DestroyObject(device, descriptorSetLayout, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, pAllocator); 1502 } 1503 get_dispatch_table(ot_device_table_map, device)->DestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator); 1504} 1505 1506VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorPool(VkDevice device, const 
                                                    VkDescriptorPoolCreateInfo *pCreateInfo,
                                                    const VkAllocationCallbacks *pAllocator, VkDescriptorPool *pDescriptorPool) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00889);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result =
        get_dispatch_table(ot_device_table_map, device)->CreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (result == VK_SUCCESS) {
            CreateObject(device, *pDescriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, pAllocator);
        }
    }
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL ResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
                                                   VkDescriptorPoolResetFlags flags) {
    bool skip_call = false;
    std::unique_lock<std::mutex> lock(global_lock);
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    skip_call |=
        ValidateObject(device, descriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, false, VALIDATION_ERROR_00930);
    skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00929);
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    // A DescriptorPool's descriptor sets are implicitly deleted when the pool is reset.
    // Remove this pool's descriptor sets from our descriptorSet map.
    auto itr = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT].begin();
    while (itr != device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT].end()) {
        OBJTRACK_NODE *pNode = (*itr).second;
        auto del_itr = itr++;
        if (pNode->parent_object == reinterpret_cast<uint64_t &>(descriptorPool)) {
            DestroyObject(device, (VkDescriptorSet)((*del_itr).first),
                          VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, nullptr);
        }
    }
    lock.unlock();
    VkResult result = get_dispatch_table(ot_device_table_map, device)->ResetDescriptorPool(device, descriptorPool, flags);
    return result;
}

VKAPI_ATTR void VKAPI_CALL UpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
                                                const VkWriteDescriptorSet *pDescriptorWrites, uint32_t descriptorCopyCount,
                                                const VkCopyDescriptorSet *pDescriptorCopies) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00933);
        if (pDescriptorCopies) {
            for (uint32_t idx0 = 0; idx0 < descriptorCopyCount; ++idx0) {
                if (pDescriptorCopies[idx0].dstSet) {
                    skip_call |= ValidateObject(device, pDescriptorCopies[idx0].dstSet,
                                                VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, false, VALIDATION_ERROR_00972);
                }
                if (pDescriptorCopies[idx0].srcSet) {
                    skip_call |= ValidateObject(device, pDescriptorCopies[idx0].srcSet,
                                                VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, false, VALIDATION_ERROR_00971);
                }
            }
        }
        if (pDescriptorWrites) {
            for (uint32_t idx1 = 0; idx1 < descriptorWriteCount; ++idx1) {
                if (pDescriptorWrites[idx1].dstSet) {
                    skip_call |= ValidateObject(device, pDescriptorWrites[idx1].dstSet,
                                                VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, false, VALIDATION_ERROR_00955);
                }
                if ((pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER) ||
                    (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER)) {
                    for (uint32_t idx2 = 0; idx2 < pDescriptorWrites[idx1].descriptorCount; ++idx2) {
                        skip_call |= ValidateObject(device, pDescriptorWrites[idx1].pTexelBufferView[idx2],
                                                    VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, false, VALIDATION_ERROR_00940);
                    }
                }
                if ((pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) ||
                    (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) ||
                    (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) ||
                    (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)) {
                    for (uint32_t idx3 = 0; idx3 < pDescriptorWrites[idx1].descriptorCount; ++idx3) {
                        if (pDescriptorWrites[idx1].pImageInfo[idx3].imageView) {
                            skip_call |= ValidateObject(device, pDescriptorWrites[idx1].pImageInfo[idx3].imageView,
                                                        VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, false, VALIDATION_ERROR_00943);
                        }
                    }
                }
                if ((pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
                    (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER) ||
                    (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) ||
                    (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)) {
                    for (uint32_t idx4 = 0; idx4 < pDescriptorWrites[idx1].descriptorCount; ++idx4) {
                        if (pDescriptorWrites[idx1].pBufferInfo[idx4].buffer) {
                            skip_call |= ValidateObject(device, pDescriptorWrites[idx1].pBufferInfo[idx4].buffer,
                                                        VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false, VALIDATION_ERROR_00962);
                        }
                    }
                }
            }
        }
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_device_table_map, device)
        ->UpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
}

VKAPI_ATTR VkResult VKAPI_CALL CreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
                                                 const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00400);
        if (pCreateInfo) {
            if (pCreateInfo->pAttachments) {
                for (uint32_t idx0 = 0; idx0 < pCreateInfo->attachmentCount; ++idx0) {
                    skip_call |= ValidateObject(device, pCreateInfo->pAttachments[idx0], VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
                                                false, VALIDATION_ERROR_00420);
                }
            }
            if (pCreateInfo->renderPass) {
                skip_call |= ValidateObject(device, pCreateInfo->renderPass, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false,
                                            VALIDATION_ERROR_00419);
            }
        }
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result =
        get_dispatch_table(ot_device_table_map, device)->CreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (result == VK_SUCCESS) {
            CreateObject(device, *pFramebuffer, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, pAllocator);
        }
    }
    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyFramebuffer(VkDevice device,
VkFramebuffer framebuffer, const VkAllocationCallbacks *pAllocator) { 1652 bool skip_call = false; 1653 { 1654 std::lock_guard<std::mutex> lock(global_lock); 1655 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00425); 1656 skip_call |= ValidateObject(device, framebuffer, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, true, VALIDATION_ERROR_00426); 1657 } 1658 if (skip_call) { 1659 return; 1660 } 1661 { 1662 std::lock_guard<std::mutex> lock(global_lock); 1663 DestroyObject(device, framebuffer, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, pAllocator); 1664 } 1665 get_dispatch_table(ot_device_table_map, device)->DestroyFramebuffer(device, framebuffer, pAllocator); 1666} 1667 1668VKAPI_ATTR VkResult VKAPI_CALL CreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo, 1669 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass) { 1670 bool skip_call = false; 1671 { 1672 std::lock_guard<std::mutex> lock(global_lock); 1673 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00319); 1674 } 1675 if (skip_call) { 1676 return VK_ERROR_VALIDATION_FAILED_EXT; 1677 } 1678 VkResult result = 1679 get_dispatch_table(ot_device_table_map, device)->CreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass); 1680 { 1681 std::lock_guard<std::mutex> lock(global_lock); 1682 if (result == VK_SUCCESS) { 1683 CreateObject(device, *pRenderPass, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, pAllocator); 1684 } 1685 } 1686 return result; 1687} 1688 1689VKAPI_ATTR void VKAPI_CALL DestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks *pAllocator) { 1690 bool skip_call = false; 1691 { 1692 std::lock_guard<std::mutex> lock(global_lock); 1693 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00396); 1694 skip_call |= ValidateObject(device, renderPass, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, true, VALIDATION_ERROR_00397); 1695 } 1696 if (skip_call) { 1697 return; 1698 } 1699 { 1700 std::lock_guard<std::mutex> lock(global_lock); 1701 DestroyObject(device, renderPass, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, pAllocator); 1702 } 1703 get_dispatch_table(ot_device_table_map, device)->DestroyRenderPass(device, renderPass, pAllocator); 1704} 1705 1706VKAPI_ATTR void VKAPI_CALL GetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D *pGranularity) { 1707 bool skip_call = false; 1708 { 1709 std::lock_guard<std::mutex> lock(global_lock); 1710 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00449); 1711 skip_call |= ValidateObject(device, renderPass, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false, VALIDATION_ERROR_00450); 1712 } 1713 if (skip_call) { 1714 return; 1715 } 1716 get_dispatch_table(ot_device_table_map, device)->GetRenderAreaGranularity(device, renderPass, pGranularity); 1717} 1718 1719VKAPI_ATTR VkResult VKAPI_CALL CreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo, 1720 const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool) { 1721 bool skip_call = false; 1722 { 1723 std::lock_guard<std::mutex> lock(global_lock); 1724 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00064); 1725 } 1726 if (skip_call) { 1727 return VK_ERROR_VALIDATION_FAILED_EXT; 1728 } 1729 VkResult result = 1730 
        get_dispatch_table(ot_device_table_map, device)->CreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (result == VK_SUCCESS) {
            CreateObject(device, *pCommandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, pAllocator);
        }
    }
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL ResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |=
            ValidateObject(device, commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, false, VALIDATION_ERROR_00074);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00073);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->ResetCommandPool(device, commandPool, flags);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL BeginCommandBuffer(VkCommandBuffer command_buffer, const VkCommandBufferBeginInfo *begin_info) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(command_buffer), layer_data_map);
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(command_buffer, command_buffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false,
                                    VALIDATION_ERROR_00108);
        if (begin_info) {
            OBJTRACK_NODE *pNode =
                device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT][reinterpret_cast<const uint64_t>(command_buffer)];
            if ((begin_info->pInheritanceInfo) && (pNode->status & OBJSTATUS_COMMAND_BUFFER_SECONDARY)) {
                skip_call |= ValidateObject(command_buffer, begin_info->pInheritanceInfo->framebuffer,
                                            VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, true);
                skip_call |= ValidateObject(command_buffer, begin_info->pInheritanceInfo->renderPass,
                                            VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, true);
            }
        }
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, command_buffer)->BeginCommandBuffer(command_buffer, begin_info);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL EndCommandBuffer(VkCommandBuffer commandBuffer) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false,
                                    VALIDATION_ERROR_00125);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, commandBuffer)->EndCommandBuffer(commandBuffer);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL ResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false,
                                    VALIDATION_ERROR_00090);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, commandBuffer)->ResetCommandBuffer(commandBuffer, flags);
    return result;
}

VKAPI_ATTR void VKAPI_CALL CmdBindPipeline(VkCommandBuffer
commandBuffer, VkPipelineBindPoint pipelineBindPoint, 1809 VkPipeline pipeline) { 1810 bool skip_call = false; 1811 { 1812 std::lock_guard<std::mutex> lock(global_lock); 1813 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 1814 VALIDATION_ERROR_00599); 1815 skip_call |= 1816 ValidateObject(commandBuffer, pipeline, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, false, VALIDATION_ERROR_00601); 1817 } 1818 if (skip_call) { 1819 return; 1820 } 1821 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline); 1822} 1823 1824VKAPI_ATTR void VKAPI_CALL CmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, 1825 const VkViewport *pViewports) { 1826 bool skip_call = false; 1827 { 1828 std::lock_guard<std::mutex> lock(global_lock); 1829 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 1830 VALIDATION_ERROR_01443); 1831 } 1832 if (skip_call) { 1833 return; 1834 } 1835 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports); 1836} 1837 1838VKAPI_ATTR void VKAPI_CALL CmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, 1839 const VkRect2D *pScissors) { 1840 bool skip_call = false; 1841 { 1842 std::lock_guard<std::mutex> lock(global_lock); 1843 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 1844 VALIDATION_ERROR_01492); 1845 } 1846 if (skip_call) { 1847 return; 1848 } 1849 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors); 1850} 1851 1852VKAPI_ATTR void VKAPI_CALL CmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) { 1853 bool skip_call = false; 1854 { 1855 std::lock_guard<std::mutex> lock(global_lock); 1856 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 1857 VALIDATION_ERROR_01478); 1858 } 1859 if (skip_call) { 1860 return; 1861 } 1862 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetLineWidth(commandBuffer, lineWidth); 1863} 1864 1865VKAPI_ATTR void VKAPI_CALL CmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, 1866 float depthBiasSlopeFactor) { 1867 bool skip_call = false; 1868 { 1869 std::lock_guard<std::mutex> lock(global_lock); 1870 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 1871 VALIDATION_ERROR_01483); 1872 } 1873 if (skip_call) { 1874 return; 1875 } 1876 get_dispatch_table(ot_device_table_map, commandBuffer) 1877 ->CmdSetDepthBias(commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor); 1878} 1879 1880VKAPI_ATTR void VKAPI_CALL CmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) { 1881 bool skip_call = false; 1882 { 1883 std::lock_guard<std::mutex> lock(global_lock); 1884 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 1885 VALIDATION_ERROR_01551); 1886 } 1887 if (skip_call) { 1888 return; 1889 } 1890 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetBlendConstants(commandBuffer, blendConstants); 1891} 1892 1893VKAPI_ATTR void VKAPI_CALL CmdSetDepthBounds(VkCommandBuffer commandBuffer, float 
minDepthBounds, float maxDepthBounds) { 1894 bool skip_call = false; 1895 { 1896 std::lock_guard<std::mutex> lock(global_lock); 1897 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 1898 VALIDATION_ERROR_01507); 1899 } 1900 if (skip_call) { 1901 return; 1902 } 1903 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds); 1904} 1905 1906VKAPI_ATTR void VKAPI_CALL CmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, 1907 uint32_t compareMask) { 1908 bool skip_call = false; 1909 { 1910 std::lock_guard<std::mutex> lock(global_lock); 1911 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 1912 VALIDATION_ERROR_01515); 1913 } 1914 if (skip_call) { 1915 return; 1916 } 1917 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetStencilCompareMask(commandBuffer, faceMask, compareMask); 1918} 1919 1920VKAPI_ATTR void VKAPI_CALL CmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask) { 1921 bool skip_call = false; 1922 { 1923 std::lock_guard<std::mutex> lock(global_lock); 1924 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 1925 VALIDATION_ERROR_01521); 1926 } 1927 if (skip_call) { 1928 return; 1929 } 1930 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetStencilWriteMask(commandBuffer, faceMask, writeMask); 1931} 1932 1933VKAPI_ATTR void VKAPI_CALL CmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference) { 1934 bool skip_call = false; 1935 { 1936 std::lock_guard<std::mutex> lock(global_lock); 1937 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 1938 VALIDATION_ERROR_01527); 1939 } 1940 if (skip_call) { 1941 return; 1942 } 1943 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetStencilReference(commandBuffer, faceMask, reference); 1944} 1945 1946VKAPI_ATTR void VKAPI_CALL CmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, 1947 VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, 1948 const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount, 1949 const uint32_t *pDynamicOffsets) { 1950 bool skip_call = false; 1951 { 1952 std::lock_guard<std::mutex> lock(global_lock); 1953 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 1954 VALIDATION_ERROR_00979); 1955 skip_call |= 1956 ValidateObject(commandBuffer, layout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false, VALIDATION_ERROR_00981); 1957 if (pDescriptorSets) { 1958 for (uint32_t idx0 = 0; idx0 < descriptorSetCount; ++idx0) { 1959 skip_call |= ValidateObject(commandBuffer, pDescriptorSets[idx0], 1960 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, false); 1961 } 1962 } 1963 } 1964 if (skip_call) { 1965 return; 1966 } 1967 get_dispatch_table(ot_device_table_map, commandBuffer) 1968 ->CmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, 1969 dynamicOffsetCount, pDynamicOffsets); 1970} 1971 1972VKAPI_ATTR void VKAPI_CALL CmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, 1973 VkIndexType indexType) { 1974 bool skip_call = false; 1975 { 1976 
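        // Standard object_tracker pattern for vkCmd* entry points: under the global lock, validate every handle the
        // call references (here the command buffer and the index buffer), then call down the chain only if none failed.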
std::lock_guard<std::mutex> lock(global_lock); 1977 skip_call |= ValidateObject(commandBuffer, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false, VALIDATION_ERROR_01354); 1978 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 1979 VALIDATION_ERROR_01353); 1980 } 1981 if (skip_call) { 1982 return; 1983 } 1984 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdBindIndexBuffer(commandBuffer, buffer, offset, indexType); 1985} 1986 1987VKAPI_ATTR void VKAPI_CALL CmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, 1988 const VkBuffer *pBuffers, const VkDeviceSize *pOffsets) { 1989 bool skip_call = false; 1990 { 1991 std::lock_guard<std::mutex> lock(global_lock); 1992 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 1993 VALIDATION_ERROR_01419); 1994 if (pBuffers) { 1995 for (uint32_t idx0 = 0; idx0 < bindingCount; ++idx0) { 1996 skip_call |= 1997 ValidateObject(commandBuffer, pBuffers[idx0], VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false); 1998 } 1999 } 2000 } 2001 if (skip_call) { 2002 return; 2003 } 2004 get_dispatch_table(ot_device_table_map, commandBuffer) 2005 ->CmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets); 2006} 2007 2008VKAPI_ATTR void VKAPI_CALL CmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, 2009 uint32_t firstVertex, uint32_t firstInstance) { 2010 bool skip_call = false; 2011 { 2012 std::lock_guard<std::mutex> lock(global_lock); 2013 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 2014 VALIDATION_ERROR_01362); 2015 } 2016 if (skip_call) { 2017 return; 2018 } 2019 get_dispatch_table(ot_device_table_map, commandBuffer) 2020 ->CmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance); 2021} 2022 2023VKAPI_ATTR void VKAPI_CALL CmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, 2024 uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) { 2025 bool skip_call = false; 2026 { 2027 std::lock_guard<std::mutex> lock(global_lock); 2028 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 2029 VALIDATION_ERROR_01369); 2030 } 2031 if (skip_call) { 2032 return; 2033 } 2034 get_dispatch_table(ot_device_table_map, commandBuffer) 2035 ->CmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance); 2036} 2037 2038VKAPI_ATTR void VKAPI_CALL CmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, 2039 uint32_t stride) { 2040 bool skip_call = false; 2041 { 2042 std::lock_guard<std::mutex> lock(global_lock); 2043 skip_call |= ValidateObject(commandBuffer, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false, VALIDATION_ERROR_01378); 2044 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 2045 VALIDATION_ERROR_01377); 2046 } 2047 if (skip_call) { 2048 return; 2049 } 2050 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride); 2051} 2052 2053VKAPI_ATTR void VKAPI_CALL CmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, 2054 uint32_t drawCount, uint32_t stride) { 2055 bool skip_call = false; 2056 { 2057 
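        // The indirect draw reads its parameters from 'buffer', so that handle is validated along with the command buffer.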
std::lock_guard<std::mutex> lock(global_lock); 2058 skip_call |= ValidateObject(commandBuffer, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false, VALIDATION_ERROR_01390); 2059 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 2060 VALIDATION_ERROR_01389); 2061 } 2062 if (skip_call) { 2063 return; 2064 } 2065 get_dispatch_table(ot_device_table_map, commandBuffer) 2066 ->CmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride); 2067} 2068 2069VKAPI_ATTR void VKAPI_CALL CmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) { 2070 bool skip_call = false; 2071 { 2072 std::lock_guard<std::mutex> lock(global_lock); 2073 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 2074 VALIDATION_ERROR_01559); 2075 } 2076 if (skip_call) { 2077 return; 2078 } 2079 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdDispatch(commandBuffer, x, y, z); 2080} 2081 2082VKAPI_ATTR void VKAPI_CALL CmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) { 2083 bool skip_call = false; 2084 { 2085 std::lock_guard<std::mutex> lock(global_lock); 2086 skip_call |= ValidateObject(commandBuffer, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false, VALIDATION_ERROR_01566); 2087 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 2088 VALIDATION_ERROR_01565); 2089 } 2090 if (skip_call) { 2091 return; 2092 } 2093 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdDispatchIndirect(commandBuffer, buffer, offset); 2094} 2095 2096VKAPI_ATTR void VKAPI_CALL CmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, 2097 uint32_t regionCount, const VkBufferCopy *pRegions) { 2098 bool skip_call = false; 2099 { 2100 std::lock_guard<std::mutex> lock(global_lock); 2101 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 2102 VALIDATION_ERROR_01166); 2103 skip_call |= 2104 ValidateObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false, VALIDATION_ERROR_01168); 2105 skip_call |= 2106 ValidateObject(commandBuffer, srcBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false, VALIDATION_ERROR_01167); 2107 } 2108 if (skip_call) { 2109 return; 2110 } 2111 get_dispatch_table(ot_device_table_map, commandBuffer) 2112 ->CmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions); 2113} 2114 2115VKAPI_ATTR void VKAPI_CALL CmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, 2116 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, 2117 const VkImageCopy *pRegions) { 2118 bool skip_call = false; 2119 { 2120 std::lock_guard<std::mutex> lock(global_lock); 2121 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 2122 VALIDATION_ERROR_01186); 2123 skip_call |= ValidateObject(commandBuffer, dstImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false, VALIDATION_ERROR_01189); 2124 skip_call |= ValidateObject(commandBuffer, srcImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false, VALIDATION_ERROR_01187); 2125 } 2126 if (skip_call) { 2127 return; 2128 } 2129 get_dispatch_table(ot_device_table_map, commandBuffer) 2130 ->CmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions); 2131} 2132 2133VKAPI_ATTR void 
VKAPI_CALL CmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, 2134 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, 2135 const VkImageBlit *pRegions, VkFilter filter) { 2136 bool skip_call = false; 2137 { 2138 std::lock_guard<std::mutex> lock(global_lock); 2139 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 2140 VALIDATION_ERROR_01291); 2141 skip_call |= ValidateObject(commandBuffer, dstImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false, VALIDATION_ERROR_01294); 2142 skip_call |= ValidateObject(commandBuffer, srcImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false, VALIDATION_ERROR_01292); 2143 } 2144 if (skip_call) { 2145 return; 2146 } 2147 get_dispatch_table(ot_device_table_map, commandBuffer) 2148 ->CmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter); 2149} 2150 2151VKAPI_ATTR void VKAPI_CALL CmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, 2152 VkImageLayout dstImageLayout, uint32_t regionCount, 2153 const VkBufferImageCopy *pRegions) { 2154 bool skip_call = false; 2155 { 2156 std::lock_guard<std::mutex> lock(global_lock); 2157 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 2158 VALIDATION_ERROR_01235); 2159 skip_call |= ValidateObject(commandBuffer, dstImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false, VALIDATION_ERROR_01237); 2160 skip_call |= 2161 ValidateObject(commandBuffer, srcBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false, VALIDATION_ERROR_01236); 2162 } 2163 if (skip_call) { 2164 return; 2165 } 2166 get_dispatch_table(ot_device_table_map, commandBuffer) 2167 ->CmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions); 2168} 2169 2170VKAPI_ATTR void VKAPI_CALL CmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, 2171 VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) { 2172 bool skip_call = false; 2173 { 2174 std::lock_guard<std::mutex> lock(global_lock); 2175 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 2176 VALIDATION_ERROR_01253); 2177 skip_call |= 2178 ValidateObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false, VALIDATION_ERROR_01256); 2179 skip_call |= ValidateObject(commandBuffer, srcImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false, VALIDATION_ERROR_01254); 2180 } 2181 if (skip_call) { 2182 return; 2183 } 2184 get_dispatch_table(ot_device_table_map, commandBuffer) 2185 ->CmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions); 2186} 2187 2188VKAPI_ATTR void VKAPI_CALL CmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, 2189 VkDeviceSize dataSize, const uint32_t *pData) { 2190 bool skip_call = false; 2191 { 2192 std::lock_guard<std::mutex> lock(global_lock); 2193 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 2194 VALIDATION_ERROR_01150); 2195 skip_call |= 2196 ValidateObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false, VALIDATION_ERROR_01151); 2197 } 2198 if (skip_call) { 2199 return; 2200 } 2201 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdUpdateBuffer(commandBuffer, dstBuffer, 
dstOffset, dataSize, pData); 2202} 2203 2204VKAPI_ATTR void VKAPI_CALL CmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, 2205 VkDeviceSize size, uint32_t data) { 2206 bool skip_call = false; 2207 { 2208 std::lock_guard<std::mutex> lock(global_lock); 2209 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 2210 VALIDATION_ERROR_01138); 2211 skip_call |= 2212 ValidateObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false, VALIDATION_ERROR_01139); 2213 } 2214 if (skip_call) { 2215 return; 2216 } 2217 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data); 2218} 2219 2220VKAPI_ATTR void VKAPI_CALL CmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, 2221 const VkClearColorValue *pColor, uint32_t rangeCount, 2222 const VkImageSubresourceRange *pRanges) { 2223 bool skip_call = false; 2224 { 2225 std::lock_guard<std::mutex> lock(global_lock); 2226 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 2227 VALIDATION_ERROR_01089); 2228 skip_call |= ValidateObject(commandBuffer, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false, VALIDATION_ERROR_01090); 2229 } 2230 if (skip_call) { 2231 return; 2232 } 2233 get_dispatch_table(ot_device_table_map, commandBuffer) 2234 ->CmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges); 2235} 2236 2237VKAPI_ATTR void VKAPI_CALL CmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, 2238 const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount, 2239 const VkImageSubresourceRange *pRanges) { 2240 bool skip_call = false; 2241 { 2242 std::lock_guard<std::mutex> lock(global_lock); 2243 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 2244 VALIDATION_ERROR_01104); 2245 skip_call |= ValidateObject(commandBuffer, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false, VALIDATION_ERROR_01105); 2246 } 2247 if (skip_call) { 2248 return; 2249 } 2250 get_dispatch_table(ot_device_table_map, commandBuffer) 2251 ->CmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges); 2252} 2253 2254VKAPI_ATTR void VKAPI_CALL CmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount, 2255 const VkClearAttachment *pAttachments, uint32_t rectCount, 2256 const VkClearRect *pRects) { 2257 bool skip_call = false; 2258 { 2259 std::lock_guard<std::mutex> lock(global_lock); 2260 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 2261 VALIDATION_ERROR_01117); 2262 } 2263 if (skip_call) { 2264 return; 2265 } 2266 get_dispatch_table(ot_device_table_map, commandBuffer) 2267 ->CmdClearAttachments(commandBuffer, attachmentCount, pAttachments, rectCount, pRects); 2268} 2269 2270VKAPI_ATTR void VKAPI_CALL CmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, 2271 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, 2272 const VkImageResolve *pRegions) { 2273 bool skip_call = false; 2274 { 2275 std::lock_guard<std::mutex> lock(global_lock); 2276 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 2277 VALIDATION_ERROR_01327); 2278 skip_call |= 
ValidateObject(commandBuffer, dstImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false, VALIDATION_ERROR_01330); 2279 skip_call |= ValidateObject(commandBuffer, srcImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false, VALIDATION_ERROR_01328); 2280 } 2281 if (skip_call) { 2282 return; 2283 } 2284 get_dispatch_table(ot_device_table_map, commandBuffer) 2285 ->CmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions); 2286} 2287 2288VKAPI_ATTR void VKAPI_CALL CmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) { 2289 bool skip_call = false; 2290 { 2291 std::lock_guard<std::mutex> lock(global_lock); 2292 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 2293 VALIDATION_ERROR_00232); 2294 skip_call |= ValidateObject(commandBuffer, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false, VALIDATION_ERROR_00233); 2295 } 2296 if (skip_call) { 2297 return; 2298 } 2299 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetEvent(commandBuffer, event, stageMask); 2300} 2301 2302VKAPI_ATTR void VKAPI_CALL CmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) { 2303 bool skip_call = false; 2304 { 2305 std::lock_guard<std::mutex> lock(global_lock); 2306 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 2307 VALIDATION_ERROR_00243); 2308 skip_call |= ValidateObject(commandBuffer, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false, VALIDATION_ERROR_00244); 2309 } 2310 if (skip_call) { 2311 return; 2312 } 2313 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdResetEvent(commandBuffer, event, stageMask); 2314} 2315 2316VKAPI_ATTR void VKAPI_CALL CmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents, 2317 VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, 2318 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, 2319 uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers, 2320 uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) { 2321 bool skip_call = false; 2322 { 2323 std::lock_guard<std::mutex> lock(global_lock); 2324 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 2325 VALIDATION_ERROR_00252); 2326 if (pBufferMemoryBarriers) { 2327 for (uint32_t idx0 = 0; idx0 < bufferMemoryBarrierCount; ++idx0) { 2328 if (pBufferMemoryBarriers[idx0].buffer) { 2329 skip_call |= ValidateObject(commandBuffer, pBufferMemoryBarriers[idx0].buffer, 2330 VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false, VALIDATION_ERROR_00259); 2331 } 2332 } 2333 } 2334 if (pEvents) { 2335 for (uint32_t idx1 = 0; idx1 < eventCount; ++idx1) { 2336 skip_call |= ValidateObject(commandBuffer, pEvents[idx1], VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false, 2337 VALIDATION_ERROR_00253); 2338 } 2339 } 2340 if (pImageMemoryBarriers) { 2341 for (uint32_t idx2 = 0; idx2 < imageMemoryBarrierCount; ++idx2) { 2342 if (pImageMemoryBarriers[idx2].image) { 2343 skip_call |= ValidateObject(commandBuffer, pImageMemoryBarriers[idx2].image, 2344 VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false, VALIDATION_ERROR_00260); 2345 } 2346 } 2347 } 2348 } 2349 if (skip_call) { 2350 return; 2351 } 2352 get_dispatch_table(ot_device_table_map, commandBuffer) 2353 ->CmdWaitEvents(commandBuffer, eventCount, pEvents, srcStageMask, 
dstStageMask, memoryBarrierCount, pMemoryBarriers, 2354 bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers); 2355} 2356 2357VKAPI_ATTR void VKAPI_CALL CmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, 2358 VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, 2359 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, 2360 uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers, 2361 uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) { 2362 bool skip_call = false; 2363 { 2364 std::lock_guard<std::mutex> lock(global_lock); 2365 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 2366 VALIDATION_ERROR_00270); 2367 if (pBufferMemoryBarriers) { 2368 for (uint32_t idx0 = 0; idx0 < bufferMemoryBarrierCount; ++idx0) { 2369 if (pBufferMemoryBarriers[idx0].buffer) { 2370 skip_call |= ValidateObject(commandBuffer, pBufferMemoryBarriers[idx0].buffer, 2371 VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false, VALIDATION_ERROR_00277); 2372 } 2373 } 2374 } 2375 if (pImageMemoryBarriers) { 2376 for (uint32_t idx1 = 0; idx1 < imageMemoryBarrierCount; ++idx1) { 2377 if (pImageMemoryBarriers[idx1].image) { 2378 skip_call |= ValidateObject(commandBuffer, pImageMemoryBarriers[idx1].image, 2379 VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false, VALIDATION_ERROR_00278); 2380 } 2381 } 2382 } 2383 } 2384 if (skip_call) { 2385 return; 2386 } 2387 get_dispatch_table(ot_device_table_map, commandBuffer) 2388 ->CmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, 2389 bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers); 2390} 2391 2392VKAPI_ATTR void VKAPI_CALL CmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, 2393 VkQueryControlFlags flags) { 2394 bool skip_call = false; 2395 { 2396 std::lock_guard<std::mutex> lock(global_lock); 2397 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 2398 VALIDATION_ERROR_01035); 2399 skip_call |= 2400 ValidateObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false, VALIDATION_ERROR_01036); 2401 } 2402 if (skip_call) { 2403 return; 2404 } 2405 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdBeginQuery(commandBuffer, queryPool, query, flags); 2406} 2407 2408VKAPI_ATTR void VKAPI_CALL CmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query) { 2409 bool skip_call = false; 2410 { 2411 std::lock_guard<std::mutex> lock(global_lock); 2412 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 2413 VALIDATION_ERROR_01043); 2414 skip_call |= 2415 ValidateObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false, VALIDATION_ERROR_01044); 2416 } 2417 if (skip_call) { 2418 return; 2419 } 2420 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdEndQuery(commandBuffer, queryPool, query); 2421} 2422 2423VKAPI_ATTR void VKAPI_CALL CmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, 2424 uint32_t queryCount) { 2425 bool skip_call = false; 2426 { 2427 std::lock_guard<std::mutex> lock(global_lock); 2428 skip_call |= ValidateObject(commandBuffer, commandBuffer, 
VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 2429 VALIDATION_ERROR_01021); 2430 skip_call |= 2431 ValidateObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false, VALIDATION_ERROR_01022); 2432 } 2433 if (skip_call) { 2434 return; 2435 } 2436 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount); 2437} 2438 2439VKAPI_ATTR void VKAPI_CALL CmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, 2440 VkQueryPool queryPool, uint32_t query) { 2441 bool skip_call = false; 2442 { 2443 std::lock_guard<std::mutex> lock(global_lock); 2444 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 2445 VALIDATION_ERROR_01078); 2446 skip_call |= 2447 ValidateObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false, VALIDATION_ERROR_01080); 2448 } 2449 if (skip_call) { 2450 return; 2451 } 2452 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, query); 2453} 2454 2455VKAPI_ATTR void VKAPI_CALL CmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, 2456 uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, 2457 VkDeviceSize stride, VkQueryResultFlags flags) { 2458 bool skip_call = false; 2459 { 2460 std::lock_guard<std::mutex> lock(global_lock); 2461 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 2462 VALIDATION_ERROR_01068); 2463 skip_call |= 2464 ValidateObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false, VALIDATION_ERROR_01070); 2465 skip_call |= 2466 ValidateObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false, VALIDATION_ERROR_01069); 2467 } 2468 if (skip_call) { 2469 return; 2470 } 2471 get_dispatch_table(ot_device_table_map, commandBuffer) 2472 ->CmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags); 2473} 2474 2475VKAPI_ATTR void VKAPI_CALL CmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, 2476 uint32_t offset, uint32_t size, const void *pValues) { 2477 bool skip_call = false; 2478 { 2479 std::lock_guard<std::mutex> lock(global_lock); 2480 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 2481 VALIDATION_ERROR_00993); 2482 skip_call |= 2483 ValidateObject(commandBuffer, layout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false, VALIDATION_ERROR_00994); 2484 } 2485 if (skip_call) { 2486 return; 2487 } 2488 get_dispatch_table(ot_device_table_map, commandBuffer) 2489 ->CmdPushConstants(commandBuffer, layout, stageFlags, offset, size, pValues); 2490} 2491 2492VKAPI_ATTR void VKAPI_CALL CmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin, 2493 VkSubpassContents contents) { 2494 bool skip_call = false; 2495 { 2496 std::lock_guard<std::mutex> lock(global_lock); 2497 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 2498 VALIDATION_ERROR_00435); 2499 if (pRenderPassBegin) { 2500 skip_call |= ValidateObject(commandBuffer, pRenderPassBegin->framebuffer, 2501 VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, false); 2502 skip_call |= ValidateObject(commandBuffer, 
pRenderPassBegin->renderPass, 2503 VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false); 2504 } 2505 } 2506 if (skip_call) { 2507 return; 2508 } 2509 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents); 2510} 2511 2512VKAPI_ATTR void VKAPI_CALL CmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) { 2513 bool skip_call = false; 2514 { 2515 std::lock_guard<std::mutex> lock(global_lock); 2516 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 2517 VALIDATION_ERROR_00454); 2518 } 2519 if (skip_call) { 2520 return; 2521 } 2522 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdNextSubpass(commandBuffer, contents); 2523} 2524 2525VKAPI_ATTR void VKAPI_CALL CmdEndRenderPass(VkCommandBuffer commandBuffer) { 2526 bool skip_call = false; 2527 { 2528 std::lock_guard<std::mutex> lock(global_lock); 2529 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 2530 VALIDATION_ERROR_00461); 2531 } 2532 if (skip_call) { 2533 return; 2534 } 2535 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdEndRenderPass(commandBuffer); 2536} 2537 2538VKAPI_ATTR void VKAPI_CALL CmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, 2539 const VkCommandBuffer *pCommandBuffers) { 2540 bool skip_call = false; 2541 { 2542 std::lock_guard<std::mutex> lock(global_lock); 2543 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false, 2544 VALIDATION_ERROR_00159); 2545 if (pCommandBuffers) { 2546 for (uint32_t idx0 = 0; idx0 < commandBufferCount; ++idx0) { 2547 skip_call |= ValidateObject(commandBuffer, pCommandBuffers[idx0], VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 2548 false, VALIDATION_ERROR_00160); 2549 } 2550 } 2551 } 2552 if (skip_call) { 2553 return; 2554 } 2555 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdExecuteCommands(commandBuffer, commandBufferCount, pCommandBuffers); 2556} 2557 2558VKAPI_ATTR void VKAPI_CALL DestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks *pAllocator) { 2559 bool skip_call = false; 2560 { 2561 std::lock_guard<std::mutex> lock(global_lock); 2562 skip_call |= ValidateObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false); 2563 skip_call |= ValidateObject(instance, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false); 2564 } 2565 if (skip_call) { 2566 return; 2567 } 2568 { 2569 std::lock_guard<std::mutex> lock(global_lock); 2570 DestroyObject(instance, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator); 2571 } 2572 get_dispatch_table(ot_instance_table_map, instance)->DestroySurfaceKHR(instance, surface, pAllocator); 2573} 2574 2575VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, 2576 VkSurfaceKHR surface, VkBool32 *pSupported) { 2577 bool skip_call = false; 2578 { 2579 std::lock_guard<std::mutex> lock(global_lock); 2580 skip_call |= 2581 ValidateObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false); 2582 skip_call |= ValidateObject(physicalDevice, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false); 2583 } 2584 if (skip_call) { 2585 return VK_ERROR_VALIDATION_FAILED_EXT; 2586 } 2587 VkResult result = get_dispatch_table(ot_instance_table_map, physicalDevice) 2588 
->GetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface, pSupported); 2589 return result; 2590} 2591 2592VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, 2593 VkSurfaceCapabilitiesKHR *pSurfaceCapabilities) { 2594 bool skip_call = false; 2595 { 2596 std::lock_guard<std::mutex> lock(global_lock); 2597 skip_call |= 2598 ValidateObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false); 2599 skip_call |= ValidateObject(physicalDevice, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false); 2600 } 2601 if (skip_call) { 2602 return VK_ERROR_VALIDATION_FAILED_EXT; 2603 } 2604 VkResult result = get_dispatch_table(ot_instance_table_map, physicalDevice) 2605 ->GetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, pSurfaceCapabilities); 2606 return result; 2607} 2608 2609VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, 2610 uint32_t *pSurfaceFormatCount, 2611 VkSurfaceFormatKHR *pSurfaceFormats) { 2612 bool skip_call = false; 2613 { 2614 std::lock_guard<std::mutex> lock(global_lock); 2615 skip_call |= 2616 ValidateObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false); 2617 skip_call |= ValidateObject(physicalDevice, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false); 2618 } 2619 if (skip_call) { 2620 return VK_ERROR_VALIDATION_FAILED_EXT; 2621 } 2622 VkResult result = get_dispatch_table(ot_instance_table_map, physicalDevice) 2623 ->GetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats); 2624 return result; 2625} 2626 2627VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, 2628 uint32_t *pPresentModeCount, 2629 VkPresentModeKHR *pPresentModes) { 2630 bool skip_call = false; 2631 { 2632 std::lock_guard<std::mutex> lock(global_lock); 2633 skip_call |= 2634 ValidateObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false); 2635 skip_call |= ValidateObject(physicalDevice, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false); 2636 } 2637 if (skip_call) { 2638 return VK_ERROR_VALIDATION_FAILED_EXT; 2639 } 2640 VkResult result = get_dispatch_table(ot_instance_table_map, physicalDevice) 2641 ->GetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, pPresentModes); 2642 return result; 2643} 2644 2645VKAPI_ATTR VkResult VKAPI_CALL CreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo, 2646 const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain) { 2647 bool skip_call = false; 2648 { 2649 std::lock_guard<std::mutex> lock(global_lock); 2650 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false); 2651 if (pCreateInfo) { 2652 skip_call |= ValidateObject(device, pCreateInfo->oldSwapchain, 2653 VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, true); 2654 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2655 skip_call |= ValidateObject(device_data->physical_device, pCreateInfo->surface, 2656 VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false); 2657 } 2658 } 2659 if (skip_call) { 2660 return VK_ERROR_VALIDATION_FAILED_EXT; 2661 } 2662 VkResult result = 2663 get_dispatch_table(ot_device_table_map, device)->CreateSwapchainKHR(device, pCreateInfo, 
pAllocator, pSwapchain); 2664 { 2665 std::lock_guard<std::mutex> lock(global_lock); 2666 if (result == VK_SUCCESS) { 2667 CreateObject(device, *pSwapchain, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, pAllocator); 2668 } 2669 } 2670 return result; 2671} 2672 2673VKAPI_ATTR VkResult VKAPI_CALL AcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, 2674 VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) { 2675 bool skip_call = false; 2676 { 2677 std::lock_guard<std::mutex> lock(global_lock); 2678 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false); 2679 skip_call |= ValidateObject(device, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, true); 2680 skip_call |= ValidateObject(device, semaphore, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, true); 2681 skip_call |= ValidateObject(device, swapchain, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, false); 2682 } 2683 if (skip_call) { 2684 return VK_ERROR_VALIDATION_FAILED_EXT; 2685 } 2686 VkResult result = get_dispatch_table(ot_device_table_map, device) 2687 ->AcquireNextImageKHR(device, swapchain, timeout, semaphore, fence, pImageIndex); 2688 return result; 2689} 2690 2691VKAPI_ATTR VkResult VKAPI_CALL QueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo) { 2692 bool skip_call = false; 2693 { 2694 std::lock_guard<std::mutex> lock(global_lock); 2695 if (pPresentInfo) { 2696 if (pPresentInfo->pSwapchains) { 2697 for (uint32_t idx0 = 0; idx0 < pPresentInfo->swapchainCount; ++idx0) { 2698 skip_call |= ValidateObject(queue, pPresentInfo->pSwapchains[idx0], 2699 VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, false); 2700 } 2701 } 2702 if (pPresentInfo->pWaitSemaphores) { 2703 for (uint32_t idx1 = 0; idx1 < pPresentInfo->waitSemaphoreCount; ++idx1) { 2704 skip_call |= ValidateObject(queue, pPresentInfo->pWaitSemaphores[idx1], 2705 VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, false); 2706 } 2707 } 2708 } 2709 skip_call |= ValidateObject(queue, queue, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, false); 2710 } 2711 if (skip_call) { 2712 return VK_ERROR_VALIDATION_FAILED_EXT; 2713 } 2714 VkResult result = get_dispatch_table(ot_device_table_map, queue)->QueuePresentKHR(queue, pPresentInfo); 2715 return result; 2716} 2717 2718#ifdef VK_USE_PLATFORM_WIN32_KHR 2719VKAPI_ATTR VkResult VKAPI_CALL CreateWin32SurfaceKHR(VkInstance instance, const VkWin32SurfaceCreateInfoKHR *pCreateInfo, 2720 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) { 2721 bool skip_call = false; 2722 { 2723 std::lock_guard<std::mutex> lock(global_lock); 2724 skip_call |= ValidateObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false); 2725 } 2726 if (skip_call) { 2727 return VK_ERROR_VALIDATION_FAILED_EXT; 2728 } 2729 VkResult result = 2730 get_dispatch_table(ot_instance_table_map, instance)->CreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator, pSurface); 2731 { 2732 std::lock_guard<std::mutex> lock(global_lock); 2733 if (result == VK_SUCCESS) { 2734 CreateObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator); 2735 } 2736 } 2737 return result; 2738} 2739 2740VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice, 2741 uint32_t queueFamilyIndex) { 2742 bool skip_call = false; 2743 { 2744 std::lock_guard<std::mutex> lock(global_lock); 2745 skip_call |= 2746 ValidateObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false); 2747 } 2748 if (skip_call) { 
2749 return VK_FALSE; 2750 } 2751 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice) 2752 ->GetPhysicalDeviceWin32PresentationSupportKHR(physicalDevice, queueFamilyIndex); 2753 return result; 2754} 2755#endif // VK_USE_PLATFORM_WIN32_KHR 2756 2757#ifdef VK_USE_PLATFORM_XCB_KHR 2758VKAPI_ATTR VkResult VKAPI_CALL CreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo, 2759 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) { 2760 bool skip_call = false; 2761 { 2762 std::lock_guard<std::mutex> lock(global_lock); 2763 skip_call |= ValidateObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false); 2764 } 2765 if (skip_call) { 2766 return VK_ERROR_VALIDATION_FAILED_EXT; 2767 } 2768 VkResult result = 2769 get_dispatch_table(ot_instance_table_map, instance)->CreateXcbSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface); 2770 { 2771 std::lock_guard<std::mutex> lock(global_lock); 2772 if (result == VK_SUCCESS) { 2773 CreateObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator); 2774 } 2775 } 2776 return result; 2777} 2778 2779VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice, 2780 uint32_t queueFamilyIndex, xcb_connection_t *connection, 2781 xcb_visualid_t visual_id) { 2782 bool skip_call = false; 2783 { 2784 std::lock_guard<std::mutex> lock(global_lock); 2785 skip_call |= 2786 ValidateObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false); 2787 } 2788 if (skip_call) { 2789 return VK_FALSE; 2790 } 2791 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice) 2792 ->GetPhysicalDeviceXcbPresentationSupportKHR(physicalDevice, queueFamilyIndex, connection, visual_id); 2793 return result; 2794} 2795#endif // VK_USE_PLATFORM_XCB_KHR 2796 2797#ifdef VK_USE_PLATFORM_XLIB_KHR 2798VKAPI_ATTR VkResult VKAPI_CALL CreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo, 2799 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) { 2800 bool skip_call = false; 2801 { 2802 std::lock_guard<std::mutex> lock(global_lock); 2803 skip_call |= ValidateObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false); 2804 } 2805 if (skip_call) { 2806 return VK_ERROR_VALIDATION_FAILED_EXT; 2807 } 2808 VkResult result = 2809 get_dispatch_table(ot_instance_table_map, instance)->CreateXlibSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface); 2810 { 2811 std::lock_guard<std::mutex> lock(global_lock); 2812 if (result == VK_SUCCESS) { 2813 CreateObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator); 2814 } 2815 } 2816 return result; 2817} 2818 2819VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceXlibPresentationSupportKHR(VkPhysicalDevice physicalDevice, 2820 uint32_t queueFamilyIndex, Display *dpy, 2821 VisualID visualID) { 2822 bool skip_call = false; 2823 { 2824 std::lock_guard<std::mutex> lock(global_lock); 2825 skip_call |= 2826 ValidateObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false); 2827 } 2828 if (skip_call) { 2829 return VK_FALSE; 2830 } 2831 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice) 2832 ->GetPhysicalDeviceXlibPresentationSupportKHR(physicalDevice, queueFamilyIndex, dpy, visualID); 2833 return result; 2834} 2835#endif // VK_USE_PLATFORM_XLIB_KHR 2836 2837#ifdef VK_USE_PLATFORM_MIR_KHR 2838VKAPI_ATTR VkResult VKAPI_CALL 
CreateMirSurfaceKHR(VkInstance instance, const VkMirSurfaceCreateInfoKHR *pCreateInfo, 2839 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) { 2840 bool skip_call = false; 2841 { 2842 std::lock_guard<std::mutex> lock(global_lock); 2843 skip_call |= ValidateObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false); 2844 } 2845 if (skip_call) { 2846 return VK_ERROR_VALIDATION_FAILED_EXT; 2847 } 2848 VkResult result = 2849 get_dispatch_table(ot_instance_table_map, instance)->CreateMirSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface); 2850 { 2851 std::lock_guard<std::mutex> lock(global_lock); 2852 if (result == VK_SUCCESS) { 2853 CreateObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator); 2854 } 2855 } 2856 return result; 2857} 2858 2859VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceMirPresentationSupportKHR(VkPhysicalDevice physicalDevice, 2860 uint32_t queueFamilyIndex, MirConnection *connection) { 2861 bool skip_call = false; 2862 { 2863 std::lock_guard<std::mutex> lock(global_lock); 2864 skip_call |= 2865 ValidateObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false); 2866 } 2867 if (skip_call) { 2868 return VK_FALSE; 2869 } 2870 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice) 2871 ->GetPhysicalDeviceMirPresentationSupportKHR(physicalDevice, queueFamilyIndex, connection); 2872 return result; 2873} 2874#endif // VK_USE_PLATFORM_MIR_KHR 2875 2876#ifdef VK_USE_PLATFORM_WAYLAND_KHR 2877VKAPI_ATTR VkResult VKAPI_CALL CreateWaylandSurfaceKHR(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR *pCreateInfo, 2878 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) { 2879 bool skip_call = false; 2880 { 2881 std::lock_guard<std::mutex> lock(global_lock); 2882 skip_call |= ValidateObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false); 2883 } 2884 if (skip_call) { 2885 return VK_ERROR_VALIDATION_FAILED_EXT; 2886 } 2887 VkResult result = 2888 get_dispatch_table(ot_instance_table_map, instance)->CreateWaylandSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface); 2889 { 2890 std::lock_guard<std::mutex> lock(global_lock); 2891 if (result == VK_SUCCESS) { 2892 CreateObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator); 2893 } 2894 } 2895 return result; 2896} 2897 2898VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice, 2899 uint32_t queueFamilyIndex, 2900 struct wl_display *display) { 2901 bool skip_call = false; 2902 { 2903 std::lock_guard<std::mutex> lock(global_lock); 2904 skip_call |= 2905 ValidateObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false); 2906 } 2907 if (skip_call) { 2908 return VK_FALSE; 2909 } 2910 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice) 2911 ->GetPhysicalDeviceWaylandPresentationSupportKHR(physicalDevice, queueFamilyIndex, display); 2912 return result; 2913} 2914#endif // VK_USE_PLATFORM_WAYLAND_KHR 2915 2916#ifdef VK_USE_PLATFORM_ANDROID_KHR 2917VKAPI_ATTR VkResult VKAPI_CALL CreateAndroidSurfaceKHR(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR *pCreateInfo, 2918 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) { 2919 bool skip_call = false; 2920 { 2921 std::lock_guard<std::mutex> lock(global_lock); 2922 skip_call |= ValidateObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false); 2923 } 2924 if 
(skip_call) { 2925 return VK_ERROR_VALIDATION_FAILED_EXT; 2926 } 2927 VkResult result = 2928 get_dispatch_table(ot_instance_table_map, instance)->CreateAndroidSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface); 2929 { 2930 std::lock_guard<std::mutex> lock(global_lock); 2931 if (result == VK_SUCCESS) { 2932 CreateObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator); 2933 } 2934 } 2935 return result; 2936} 2937#endif // VK_USE_PLATFORM_ANDROID_KHR 2938 2939VKAPI_ATTR VkResult VKAPI_CALL CreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount, 2940 const VkSwapchainCreateInfoKHR *pCreateInfos, 2941 const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchains) { 2942 bool skip_call = false; 2943 uint32_t i = 0; 2944 { 2945 std::lock_guard<std::mutex> lock(global_lock); 2946 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false); 2947 if (NULL != pCreateInfos) { 2948 for (i = 0; i < swapchainCount; i++) { 2949 skip_call |= ValidateObject(device, pCreateInfos[i].oldSwapchain, 2950 VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, true); 2951 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 2952 skip_call |= ValidateObject(device_data->physical_device, pCreateInfos[i].surface, 2953 VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false); 2954 } 2955 } 2956 } 2957 if (skip_call) { 2958 return VK_ERROR_VALIDATION_FAILED_EXT; 2959 } 2960 VkResult result = 2961 get_dispatch_table(ot_device_table_map, device)->CreateSharedSwapchainsKHR(device, swapchainCount, pCreateInfos, pAllocator, pSwapchains); 2962 { 2963 std::lock_guard<std::mutex> lock(global_lock); 2964 if (result == VK_SUCCESS) { 2965 for (i = 0; i < swapchainCount; i++) { 2966 CreateObject(device, pSwapchains[i], VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, pAllocator); 2967 } 2968 } 2969 } 2970 return result; 2971} 2972 2973VKAPI_ATTR VkResult VKAPI_CALL CreateDebugReportCallbackEXT(VkInstance instance, 2974 const VkDebugReportCallbackCreateInfoEXT *pCreateInfo, 2975 const VkAllocationCallbacks *pAllocator, 2976 VkDebugReportCallbackEXT *pCallback) { 2977 VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, instance); 2978 VkResult result = pInstanceTable->CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pCallback); 2979 if (VK_SUCCESS == result) { 2980 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map); 2981 result = layer_create_msg_callback(instance_data->report_data, false, pCreateInfo, pAllocator, pCallback); 2982 CreateObject(instance, *pCallback, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT, pAllocator); 2983 } 2984 return result; 2985} 2986 2987VKAPI_ATTR void VKAPI_CALL DestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT msgCallback, 2988 const VkAllocationCallbacks *pAllocator) { 2989 VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, instance); 2990 pInstanceTable->DestroyDebugReportCallbackEXT(instance, msgCallback, pAllocator); 2991 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map); 2992 layer_destroy_msg_callback(instance_data->report_data, msgCallback, pAllocator); 2993 DestroyObject(instance, msgCallback, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT, pAllocator); 2994} 2995 2996VKAPI_ATTR void VKAPI_CALL DebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags, 2997 VkDebugReportObjectTypeEXT objType, 
uint64_t object, size_t location, 2998 int32_t msgCode, const char *pLayerPrefix, const char *pMsg) { 2999 VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, instance); 3000 pInstanceTable->DebugReportMessageEXT(instance, flags, objType, object, location, msgCode, pLayerPrefix, pMsg); 3001} 3002 3003static const VkExtensionProperties instance_extensions[] = {{VK_EXT_DEBUG_REPORT_EXTENSION_NAME, VK_EXT_DEBUG_REPORT_SPEC_VERSION}}; 3004 3005static const VkLayerProperties globalLayerProps = {"VK_LAYER_LUNARG_object_tracker", 3006 VK_LAYER_API_VERSION, // specVersion 3007 1, // implementationVersion 3008 "LunarG Validation Layer"}; 3009 3010VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceLayerProperties(uint32_t *pCount, VkLayerProperties *pProperties) { 3011 return util_GetLayerProperties(1, &globalLayerProps, pCount, pProperties); 3012} 3013 3014VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount, 3015 VkLayerProperties *pProperties) { 3016 return util_GetLayerProperties(1, &globalLayerProps, pCount, pProperties); 3017} 3018 3019VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount, 3020 VkExtensionProperties *pProperties) { 3021 if (pLayerName && !strcmp(pLayerName, globalLayerProps.layerName)) 3022 return util_GetExtensionProperties(1, instance_extensions, pCount, pProperties); 3023 3024 return VK_ERROR_LAYER_NOT_PRESENT; 3025} 3026 3027VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char *pLayerName, 3028 uint32_t *pCount, VkExtensionProperties *pProperties) { 3029 if (pLayerName && !strcmp(pLayerName, globalLayerProps.layerName)) 3030 return util_GetExtensionProperties(0, nullptr, pCount, pProperties); 3031 3032 assert(physicalDevice); 3033 VkLayerInstanceDispatchTable *pTable = get_dispatch_table(ot_instance_table_map, physicalDevice); 3034 return pTable->EnumerateDeviceExtensionProperties(physicalDevice, NULL, pCount, pProperties); 3035} 3036 3037static inline PFN_vkVoidFunction InterceptMsgCallbackGetProcAddrCommand(const char *name, VkInstance instance) { 3038 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map); 3039 return debug_report_get_instance_proc_addr(instance_data->report_data, name); 3040} 3041 3042static inline PFN_vkVoidFunction InterceptWsiEnabledCommand(const char *name, VkInstance instance) { 3043 VkLayerInstanceDispatchTable *pTable = get_dispatch_table(ot_instance_table_map, instance); 3044 if (instanceExtMap.size() == 0 || !instanceExtMap[pTable].wsi_enabled) 3045 return nullptr; 3046 3047 if (!strcmp("vkDestroySurfaceKHR", name)) 3048 return reinterpret_cast<PFN_vkVoidFunction>(DestroySurfaceKHR); 3049 if (!strcmp("vkGetPhysicalDeviceSurfaceSupportKHR", name)) 3050 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfaceSupportKHR); 3051 if (!strcmp("vkGetPhysicalDeviceSurfaceCapabilitiesKHR", name)) 3052 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfaceCapabilitiesKHR); 3053 if (!strcmp("vkGetPhysicalDeviceSurfaceFormatsKHR", name)) 3054 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfaceFormatsKHR); 3055 if (!strcmp("vkGetPhysicalDeviceSurfacePresentModesKHR", name)) 3056 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfacePresentModesKHR); 3057 3058#ifdef VK_USE_PLATFORM_WIN32_KHR 3059 if ((instanceExtMap[pTable].win32_enabled == true) && 
!strcmp("vkCreateWin32SurfaceKHR", name)) 3060 return reinterpret_cast<PFN_vkVoidFunction>(CreateWin32SurfaceKHR); 3061 if ((instanceExtMap[pTable].win32_enabled == true) && !strcmp("vkGetPhysicalDeviceWin32PresentationSupportKHR", name)) 3062 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceWin32PresentationSupportKHR); 3063#endif // VK_USE_PLATFORM_WIN32_KHR 3064#ifdef VK_USE_PLATFORM_XCB_KHR 3065 if ((instanceExtMap[pTable].xcb_enabled == true) && !strcmp("vkCreateXcbSurfaceKHR", name)) 3066 return reinterpret_cast<PFN_vkVoidFunction>(CreateXcbSurfaceKHR); 3067 if ((instanceExtMap[pTable].xcb_enabled == true) && !strcmp("vkGetPhysicalDeviceXcbPresentationSupportKHR", name)) 3068 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceXcbPresentationSupportKHR); 3069#endif // VK_USE_PLATFORM_XCB_KHR 3070#ifdef VK_USE_PLATFORM_XLIB_KHR 3071 if ((instanceExtMap[pTable].xlib_enabled == true) && !strcmp("vkCreateXlibSurfaceKHR", name)) 3072 return reinterpret_cast<PFN_vkVoidFunction>(CreateXlibSurfaceKHR); 3073 if ((instanceExtMap[pTable].xlib_enabled == true) && !strcmp("vkGetPhysicalDeviceXlibPresentationSupportKHR", name)) 3074 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceXlibPresentationSupportKHR); 3075#endif // VK_USE_PLATFORM_XLIB_KHR 3076#ifdef VK_USE_PLATFORM_MIR_KHR 3077 if ((instanceExtMap[pTable].mir_enabled == true) && !strcmp("vkCreateMirSurfaceKHR", name)) 3078 return reinterpret_cast<PFN_vkVoidFunction>(CreateMirSurfaceKHR); 3079 if ((instanceExtMap[pTable].mir_enabled == true) && !strcmp("vkGetPhysicalDeviceMirPresentationSupportKHR", name)) 3080 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceMirPresentationSupportKHR); 3081#endif // VK_USE_PLATFORM_MIR_KHR 3082#ifdef VK_USE_PLATFORM_WAYLAND_KHR 3083 if ((instanceExtMap[pTable].wayland_enabled == true) && !strcmp("vkCreateWaylandSurfaceKHR", name)) 3084 return reinterpret_cast<PFN_vkVoidFunction>(CreateWaylandSurfaceKHR); 3085 if ((instanceExtMap[pTable].wayland_enabled == true) && !strcmp("vkGetPhysicalDeviceWaylandPresentationSupportKHR", name)) 3086 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceWaylandPresentationSupportKHR); 3087#endif // VK_USE_PLATFORM_WAYLAND_KHR 3088#ifdef VK_USE_PLATFORM_ANDROID_KHR 3089 if ((instanceExtMap[pTable].android_enabled == true) && !strcmp("vkCreateAndroidSurfaceKHR", name)) 3090 return reinterpret_cast<PFN_vkVoidFunction>(CreateAndroidSurfaceKHR); 3091#endif // VK_USE_PLATFORM_ANDROID_KHR 3092 3093 return nullptr; 3094} 3095 3096static void CheckDeviceRegisterExtensions(const VkDeviceCreateInfo *pCreateInfo, VkDevice device) { 3097 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 3098 device_data->wsi_enabled = false; 3099 device_data->wsi_display_swapchain_enabled = false; 3100 device_data->objtrack_extensions_enabled = false; 3101 3102 for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) { 3103 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SWAPCHAIN_EXTENSION_NAME) == 0) { 3104 device_data->wsi_enabled = true; 3105 } 3106 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME) == 0) { 3107 device_data->wsi_display_swapchain_enabled = true; 3108 } 3109 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], "OBJTRACK_EXTENSIONS") == 0) { 3110 device_data->objtrack_extensions_enabled = true; 3111 } 3112 } 3113} 3114 3115static void CheckInstanceRegisterExtensions(const VkInstanceCreateInfo *pCreateInfo, VkInstance instance) { 3116 
static void CheckInstanceRegisterExtensions(const VkInstanceCreateInfo *pCreateInfo, VkInstance instance) {
    VkLayerInstanceDispatchTable *pDisp = get_dispatch_table(ot_instance_table_map, instance);

    instanceExtMap[pDisp] = {};

    for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SURFACE_EXTENSION_NAME) == 0) {
            instanceExtMap[pDisp].wsi_enabled = true;
        }
#ifdef VK_USE_PLATFORM_XLIB_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XLIB_SURFACE_EXTENSION_NAME) == 0) {
            instanceExtMap[pDisp].xlib_enabled = true;
        }
#endif
#ifdef VK_USE_PLATFORM_XCB_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XCB_SURFACE_EXTENSION_NAME) == 0) {
            instanceExtMap[pDisp].xcb_enabled = true;
        }
#endif
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME) == 0) {
            instanceExtMap[pDisp].wayland_enabled = true;
        }
#endif
#ifdef VK_USE_PLATFORM_MIR_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_MIR_SURFACE_EXTENSION_NAME) == 0) {
            instanceExtMap[pDisp].mir_enabled = true;
        }
#endif
#ifdef VK_USE_PLATFORM_ANDROID_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_ANDROID_SURFACE_EXTENSION_NAME) == 0) {
            instanceExtMap[pDisp].android_enabled = true;
        }
#endif
#ifdef VK_USE_PLATFORM_WIN32_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WIN32_SURFACE_EXTENSION_NAME) == 0) {
            instanceExtMap[pDisp].win32_enabled = true;
        }
#endif
    }
}

VKAPI_ATTR VkResult VKAPI_CALL CreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo,
                                            const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) {
    std::lock_guard<std::mutex> lock(global_lock);
    layer_data *phy_dev_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
    VkLayerDeviceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);

    assert(chain_info->u.pLayerInfo);
    PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
    PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr = chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
    PFN_vkCreateDevice fpCreateDevice = (PFN_vkCreateDevice)fpGetInstanceProcAddr(phy_dev_data->instance, "vkCreateDevice");
    if (fpCreateDevice == NULL) {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    // Advance the link info for the next element on the chain
    chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;

    VkResult result = fpCreateDevice(physicalDevice, pCreateInfo, pAllocator, pDevice);
    if (result != VK_SUCCESS) {
        return result;
    }

    layer_data *device_data = get_my_data_ptr(get_dispatch_key(*pDevice), layer_data_map);
    device_data->report_data = layer_debug_report_create_device(phy_dev_data->report_data, *pDevice);

    // Add link back to physDev
    device_data->physical_device = physicalDevice;

    initDeviceTable(*pDevice, fpGetDeviceProcAddr, ot_device_table_map);

    CheckDeviceRegisterExtensions(pCreateInfo, *pDevice);
    CreateObject(*pDevice, *pDevice, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, pAllocator);

    return result;
}

VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
                                                                  uint32_t *pQueueFamilyPropertyCount,
                                                                  VkQueueFamilyProperties
*pQueueFamilyProperties) { 3197 get_dispatch_table(ot_instance_table_map, physicalDevice) 3198 ->GetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties); 3199 std::lock_guard<std::mutex> lock(global_lock); 3200 if (pQueueFamilyProperties != NULL) { 3201 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map); 3202 for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; i++) { 3203 instance_data->queue_family_properties.emplace_back(pQueueFamilyProperties[i]); 3204 } 3205 } 3206} 3207 3208VKAPI_ATTR VkResult VKAPI_CALL CreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, 3209 VkInstance *pInstance) { 3210 VkLayerInstanceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO); 3211 3212 assert(chain_info->u.pLayerInfo); 3213 PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr; 3214 PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance)fpGetInstanceProcAddr(NULL, "vkCreateInstance"); 3215 if (fpCreateInstance == NULL) { 3216 return VK_ERROR_INITIALIZATION_FAILED; 3217 } 3218 3219 // Advance the link info for the next element on the chain 3220 chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext; 3221 3222 VkResult result = fpCreateInstance(pCreateInfo, pAllocator, pInstance); 3223 if (result != VK_SUCCESS) { 3224 return result; 3225 } 3226 3227 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(*pInstance), layer_data_map); 3228 instance_data->instance = *pInstance; 3229 initInstanceTable(*pInstance, fpGetInstanceProcAddr, ot_instance_table_map); 3230 VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, *pInstance); 3231 3232 // Look for one or more debug report create info structures, and copy the 3233 // callback(s) for each one found (for use by vkDestroyInstance) 3234 layer_copy_tmp_callbacks(pCreateInfo->pNext, &instance_data->num_tmp_callbacks, &instance_data->tmp_dbg_create_infos, 3235 &instance_data->tmp_callbacks); 3236 3237 instance_data->report_data = debug_report_create_instance(pInstanceTable, *pInstance, pCreateInfo->enabledExtensionCount, 3238 pCreateInfo->ppEnabledExtensionNames); 3239 3240 InitObjectTracker(instance_data, pAllocator); 3241 CheckInstanceRegisterExtensions(pCreateInfo, *pInstance); 3242 3243 CreateObject(*pInstance, *pInstance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, pAllocator); 3244 3245 return result; 3246} 3247 3248VKAPI_ATTR VkResult VKAPI_CALL EnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount, 3249 VkPhysicalDevice *pPhysicalDevices) { 3250 bool skip_call = VK_FALSE; 3251 std::unique_lock<std::mutex> lock(global_lock); 3252 skip_call |= ValidateObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false, VALIDATION_ERROR_00023); 3253 lock.unlock(); 3254 if (skip_call) { 3255 return VK_ERROR_VALIDATION_FAILED_EXT; 3256 } 3257 VkResult result = get_dispatch_table(ot_instance_table_map, instance) 3258 ->EnumeratePhysicalDevices(instance, pPhysicalDeviceCount, pPhysicalDevices); 3259 lock.lock(); 3260 if (result == VK_SUCCESS) { 3261 if (pPhysicalDevices) { 3262 for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) { 3263 CreateObject(instance, pPhysicalDevices[i], VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, nullptr); 3264 } 3265 } 3266 } 3267 lock.unlock(); 3268 return result; 3269} 3270 3271VKAPI_ATTR void VKAPI_CALL GetDeviceQueue(VkDevice device, uint32_t 
queueFamilyIndex, uint32_t queueIndex, VkQueue *pQueue) { 3272 std::unique_lock<std::mutex> lock(global_lock); 3273 ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00062); 3274 lock.unlock(); 3275 3276 get_dispatch_table(ot_device_table_map, device)->GetDeviceQueue(device, queueFamilyIndex, queueIndex, pQueue); 3277 3278 lock.lock(); 3279 3280 CreateQueue(device, *pQueue, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT); 3281 AddQueueInfo(device, queueFamilyIndex, *pQueue); 3282} 3283 3284VKAPI_ATTR void VKAPI_CALL FreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks *pAllocator) { 3285 bool skip = false; 3286 std::unique_lock<std::mutex> lock(global_lock); 3287 skip |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00621); 3288 skip |= ValidateObject(device, memory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, true, VALIDATION_ERROR_00622); 3289 lock.unlock(); 3290 if (!skip) { 3291 get_dispatch_table(ot_device_table_map, device)->FreeMemory(device, memory, pAllocator); 3292 3293 lock.lock(); 3294 DestroyObject(device, memory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, pAllocator); 3295 } 3296} 3297 3298VKAPI_ATTR VkResult VKAPI_CALL MapMemory(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, 3299 VkMemoryMapFlags flags, void **ppData) { 3300 bool skip_call = VK_FALSE; 3301 std::unique_lock<std::mutex> lock(global_lock); 3302 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00631); 3303 lock.unlock(); 3304 if (skip_call == VK_TRUE) { 3305 return VK_ERROR_VALIDATION_FAILED_EXT; 3306 } 3307 VkResult result = get_dispatch_table(ot_device_table_map, device)->MapMemory(device, memory, offset, size, flags, ppData); 3308 return result; 3309} 3310 3311VKAPI_ATTR void VKAPI_CALL UnmapMemory(VkDevice device, VkDeviceMemory memory) { 3312 bool skip_call = VK_FALSE; 3313 std::unique_lock<std::mutex> lock(global_lock); 3314 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00650); 3315 lock.unlock(); 3316 if (skip_call == VK_TRUE) { 3317 return; 3318 } 3319 3320 get_dispatch_table(ot_device_table_map, device)->UnmapMemory(device, memory); 3321} 3322VKAPI_ATTR VkResult VKAPI_CALL QueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo, 3323 VkFence fence) { 3324 std::unique_lock<std::mutex> lock(global_lock); 3325 ValidateQueueFlags(queue, "QueueBindSparse"); 3326 3327 for (uint32_t i = 0; i < bindInfoCount; i++) { 3328 for (uint32_t j = 0; j < pBindInfo[i].bufferBindCount; j++) 3329 ValidateObject(queue, pBindInfo[i].pBufferBinds[j].buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, 3330 false); 3331 for (uint32_t j = 0; j < pBindInfo[i].imageOpaqueBindCount; j++) 3332 ValidateObject(queue, pBindInfo[i].pImageOpaqueBinds[j].image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 3333 false); 3334 for (uint32_t j = 0; j < pBindInfo[i].imageBindCount; j++) 3335 ValidateObject(queue, pBindInfo[i].pImageBinds[j].image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false); 3336 } 3337 lock.unlock(); 3338 3339 VkResult result = get_dispatch_table(ot_device_table_map, queue)->QueueBindSparse(queue, bindInfoCount, pBindInfo, fence); 3340 return result; 3341} 3342 3343VKAPI_ATTR VkResult VKAPI_CALL AllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pAllocateInfo, 3344 VkCommandBuffer *pCommandBuffers) { 3345 bool 
skip_call = VK_FALSE; 3346 std::unique_lock<std::mutex> lock(global_lock); 3347 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00084); 3348 skip_call |= ValidateObject(device, pAllocateInfo->commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, false, 3349 VALIDATION_ERROR_00090); 3350 lock.unlock(); 3351 3352 if (skip_call) { 3353 return VK_ERROR_VALIDATION_FAILED_EXT; 3354 } 3355 3356 VkResult result = 3357 get_dispatch_table(ot_device_table_map, device)->AllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers); 3358 3359 lock.lock(); 3360 for (uint32_t i = 0; i < pAllocateInfo->commandBufferCount; i++) { 3361 AllocateCommandBuffer(device, pAllocateInfo->commandPool, pCommandBuffers[i], 3362 VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, pAllocateInfo->level); 3363 } 3364 lock.unlock(); 3365 3366 return result; 3367} 3368 3369VKAPI_ATTR VkResult VKAPI_CALL AllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo, 3370 VkDescriptorSet *pDescriptorSets) { 3371 bool skip_call = VK_FALSE; 3372 std::unique_lock<std::mutex> lock(global_lock); 3373 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00908); 3374 skip_call |= ValidateObject(device, pAllocateInfo->descriptorPool, 3375 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, false); 3376 for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; i++) { 3377 skip_call |= ValidateObject(device, pAllocateInfo->pSetLayouts[i], 3378 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, false); 3379 } 3380 lock.unlock(); 3381 if (skip_call) { 3382 return VK_ERROR_VALIDATION_FAILED_EXT; 3383 } 3384 3385 VkResult result = 3386 get_dispatch_table(ot_device_table_map, device)->AllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets); 3387 3388 if (VK_SUCCESS == result) { 3389 lock.lock(); 3390 for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; i++) { 3391 AllocateDescriptorSet(device, pAllocateInfo->descriptorPool, pDescriptorSets[i], 3392 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT); 3393 } 3394 lock.unlock(); 3395 } 3396 3397 return result; 3398} 3399 3400VKAPI_ATTR void VKAPI_CALL FreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, 3401 const VkCommandBuffer *pCommandBuffers) { 3402 bool skip_call = false; 3403 std::unique_lock<std::mutex> lock(global_lock); 3404 ValidateObject(device, commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, false, VALIDATION_ERROR_00099); 3405 ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00098); 3406 for (uint32_t i = 0; i < commandBufferCount; i++) { 3407 skip_call |= ValidateCommandBuffer(device, commandPool, pCommandBuffers[i]); 3408 } 3409 3410 for (uint32_t i = 0; i < commandBufferCount; i++) { 3411 DestroyObject(device, pCommandBuffers[i], VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, nullptr); 3412 } 3413 3414 lock.unlock(); 3415 if (!skip_call) { 3416 get_dispatch_table(ot_device_table_map, device) 3417 ->FreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers); 3418 } 3419} 3420VKAPI_ATTR void VKAPI_CALL DestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks *pAllocator) { 3421 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 3422 std::unique_lock<std::mutex> lock(global_lock); 3423 // A swapchain's images are implicitly deleted when the 
swapchain is deleted. 3424 // Remove this swapchain's images from our map of such images. 3425 std::unordered_map<uint64_t, OBJTRACK_NODE *>::iterator itr = device_data->swapchainImageMap.begin(); 3426 while (itr != device_data->swapchainImageMap.end()) { 3427 OBJTRACK_NODE *pNode = (*itr).second; 3428 if (pNode->parent_object == reinterpret_cast<uint64_t &>(swapchain)) { 3429 delete pNode; 3430 auto delete_item = itr++; 3431 device_data->swapchainImageMap.erase(delete_item); 3432 } else { 3433 ++itr; 3434 } 3435 } 3436 DestroyObject(device, swapchain, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, pAllocator); 3437 lock.unlock(); 3438 3439 get_dispatch_table(ot_device_table_map, device)->DestroySwapchainKHR(device, swapchain, pAllocator); 3440} 3441 3442VKAPI_ATTR VkResult VKAPI_CALL FreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, 3443 const VkDescriptorSet *pDescriptorSets) { 3444 bool skip_call = false; 3445 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT; 3446 std::unique_lock<std::mutex> lock(global_lock); 3447 skip_call |= 3448 ValidateObject(device, descriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, false, VALIDATION_ERROR_00924); 3449 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00923); 3450 for (uint32_t i = 0; i < descriptorSetCount; i++) { 3451 skip_call |= ValidateDescriptorSet(device, descriptorPool, pDescriptorSets[i]); 3452 } 3453 3454 for (uint32_t i = 0; i < descriptorSetCount; i++) { 3455 DestroyObject(device, pDescriptorSets[i], VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, nullptr); 3456 } 3457 3458 lock.unlock(); 3459 if (!skip_call) { 3460 result = get_dispatch_table(ot_device_table_map, device) 3461 ->FreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets); 3462 } 3463 return result; 3464} 3465 3466VKAPI_ATTR void VKAPI_CALL DestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, 3467 const VkAllocationCallbacks *pAllocator) { 3468 bool skip_call = VK_FALSE; 3469 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 3470 std::unique_lock<std::mutex> lock(global_lock); 3471 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00904); 3472 skip_call |= 3473 ValidateObject(device, descriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, true, VALIDATION_ERROR_00905); 3474 lock.unlock(); 3475 if (skip_call) { 3476 return; 3477 } 3478 // A DescriptorPool's descriptor sets are implicitly deleted when the pool is deleted. 3479 // Remove this pool's descriptor sets from our descriptorSet map. 
3480 lock.lock(); 3481 std::unordered_map<uint64_t, OBJTRACK_NODE *>::iterator itr = 3482 device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT].begin(); 3483 while (itr != device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT].end()) { 3484 OBJTRACK_NODE *pNode = (*itr).second; 3485 auto del_itr = itr++; 3486 if (pNode->parent_object == reinterpret_cast<uint64_t &>(descriptorPool)) { 3487 DestroyObject(device, (VkDescriptorSet)((*del_itr).first), 3488 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, nullptr); 3489 } 3490 } 3491 DestroyObject(device, descriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, pAllocator); 3492 lock.unlock(); 3493 get_dispatch_table(ot_device_table_map, device)->DestroyDescriptorPool(device, descriptorPool, pAllocator); 3494} 3495 3496VKAPI_ATTR void VKAPI_CALL DestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks *pAllocator) { 3497 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map); 3498 bool skip_call = false; 3499 std::unique_lock<std::mutex> lock(global_lock); 3500 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00080); 3501 skip_call |= ValidateObject(device, commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, true, VALIDATION_ERROR_00081); 3502 lock.unlock(); 3503 if (skip_call) { 3504 return; 3505 } 3506 lock.lock(); 3507 // A CommandPool's command buffers are implicitly deleted when the pool is deleted. 3508 // Remove this pool's cmdBuffers from our cmd buffer map. 3509 auto itr = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT].begin(); 3510 auto del_itr = itr; 3511 while (itr != device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT].end()) { 3512 OBJTRACK_NODE *pNode = (*itr).second; 3513 del_itr = itr++; 3514 if (pNode->parent_object == reinterpret_cast<uint64_t &>(commandPool)) { 3515 skip_call |= ValidateCommandBuffer(device, commandPool, reinterpret_cast<VkCommandBuffer>((*del_itr).first)); 3516 DestroyObject(device, reinterpret_cast<VkCommandBuffer>((*del_itr).first), 3517 VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, nullptr); 3518 } 3519 } 3520 DestroyObject(device, commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, pAllocator); 3521 lock.unlock(); 3522 get_dispatch_table(ot_device_table_map, device)->DestroyCommandPool(device, commandPool, pAllocator); 3523} 3524 3525VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount, 3526 VkImage *pSwapchainImages) { 3527 bool skip_call = VK_FALSE; 3528 std::unique_lock<std::mutex> lock(global_lock); 3529 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false); 3530 lock.unlock(); 3531 if (skip_call) { 3532 return VK_ERROR_VALIDATION_FAILED_EXT; 3533 } 3534 VkResult result = get_dispatch_table(ot_device_table_map, device) 3535 ->GetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages); 3536 if (pSwapchainImages != NULL) { 3537 lock.lock(); 3538 for (uint32_t i = 0; i < *pSwapchainImageCount; i++) { 3539 CreateSwapchainImageObject(device, pSwapchainImages[i], swapchain); 3540 } 3541 lock.unlock(); 3542 } 3543 return result; 3544} 3545 3546VKAPI_ATTR VkResult VKAPI_CALL CreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, 3547 const VkGraphicsPipelineCreateInfo *pCreateInfos, 3548 const VkAllocationCallbacks 
*pAllocator, VkPipeline *pPipelines) { 3549 bool skip_call = VK_FALSE; 3550 std::unique_lock<std::mutex> lock(global_lock); 3551 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00519); 3552 if (pCreateInfos) { 3553 for (uint32_t idx0 = 0; idx0 < createInfoCount; ++idx0) { 3554 if (pCreateInfos[idx0].basePipelineHandle) { 3555 skip_call |= ValidateObject(device, pCreateInfos[idx0].basePipelineHandle, 3556 VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, true); 3557 } 3558 if (pCreateInfos[idx0].layout) { 3559 skip_call |= ValidateObject(device, pCreateInfos[idx0].layout, 3560 VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false); 3561 } 3562 if (pCreateInfos[idx0].pStages) { 3563 for (uint32_t idx1 = 0; idx1 < pCreateInfos[idx0].stageCount; ++idx1) { 3564 if (pCreateInfos[idx0].pStages[idx1].module) { 3565 skip_call |= ValidateObject(device, pCreateInfos[idx0].pStages[idx1].module, 3566 VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, false); 3567 } 3568 } 3569 } 3570 if (pCreateInfos[idx0].renderPass) { 3571 skip_call |= ValidateObject(device, pCreateInfos[idx0].renderPass, 3572 VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false); 3573 } 3574 } 3575 } 3576 if (pipelineCache) { 3577 skip_call |= 3578 ValidateObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, true, VALIDATION_ERROR_00520); 3579 } 3580 lock.unlock(); 3581 if (skip_call) { 3582 return VK_ERROR_VALIDATION_FAILED_EXT; 3583 } 3584 VkResult result = get_dispatch_table(ot_device_table_map, device) 3585 ->CreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines); 3586 lock.lock(); 3587 if (result == VK_SUCCESS) { 3588 for (uint32_t idx2 = 0; idx2 < createInfoCount; ++idx2) { 3589 CreateObject(device, pPipelines[idx2], VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, pAllocator); 3590 } 3591 } 3592 lock.unlock(); 3593 return result; 3594} 3595 3596VKAPI_ATTR VkResult VKAPI_CALL CreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, 3597 const VkComputePipelineCreateInfo *pCreateInfos, 3598 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) { 3599 bool skip_call = VK_FALSE; 3600 std::unique_lock<std::mutex> lock(global_lock); 3601 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false, VALIDATION_ERROR_00486); 3602 if (pCreateInfos) { 3603 for (uint32_t idx0 = 0; idx0 < createInfoCount; ++idx0) { 3604 if (pCreateInfos[idx0].basePipelineHandle) { 3605 skip_call |= ValidateObject(device, pCreateInfos[idx0].basePipelineHandle, 3606 VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, true); 3607 } 3608 if (pCreateInfos[idx0].layout) { 3609 skip_call |= ValidateObject(device, pCreateInfos[idx0].layout, 3610 VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false); 3611 } 3612 if (pCreateInfos[idx0].stage.module) { 3613 skip_call |= ValidateObject(device, pCreateInfos[idx0].stage.module, 3614 VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, false); 3615 } 3616 } 3617 } 3618 if (pipelineCache) { 3619 skip_call |= 3620 ValidateObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, true, VALIDATION_ERROR_00487); 3621 } 3622 lock.unlock(); 3623 if (skip_call) { 3624 return VK_ERROR_VALIDATION_FAILED_EXT; 3625 } 3626 VkResult result = get_dispatch_table(ot_device_table_map, device) 3627 ->CreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines); 3628 lock.lock(); 3629 if (result == 
VK_SUCCESS) {
        for (uint32_t idx1 = 0; idx1 < createInfoCount; ++idx1) {
            CreateObject(device, pPipelines[idx1], VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, pAllocator);
        }
    }
    lock.unlock();
    return result;
}

// VK_EXT_debug_marker Extension
VKAPI_ATTR VkResult VKAPI_CALL DebugMarkerSetObjectTagEXT(VkDevice device, VkDebugMarkerObjectTagInfoEXT *pTagInfo) {
    bool skip_call = false;
    std::unique_lock<std::mutex> lock(global_lock);
    skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    lock.unlock();
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->DebugMarkerSetObjectTagEXT(device, pTagInfo);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL DebugMarkerSetObjectNameEXT(VkDevice device, VkDebugMarkerObjectNameInfoEXT *pNameInfo) {
    bool skip_call = false;
    std::unique_lock<std::mutex> lock(global_lock);
    skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    lock.unlock();
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->DebugMarkerSetObjectNameEXT(device, pNameInfo);
    return result;
}

VKAPI_ATTR void VKAPI_CALL CmdDebugMarkerBeginEXT(VkCommandBuffer commandBuffer, VkDebugMarkerMarkerInfoEXT *pMarkerInfo) {
    bool skip_call = false;
    std::unique_lock<std::mutex> lock(global_lock);
    skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
    lock.unlock();
    if (!skip_call) {
        get_dispatch_table(ot_device_table_map, commandBuffer)->CmdDebugMarkerBeginEXT(commandBuffer, pMarkerInfo);
    }
}

VKAPI_ATTR void VKAPI_CALL CmdDebugMarkerEndEXT(VkCommandBuffer commandBuffer) {
    bool skip_call = false;
    std::unique_lock<std::mutex> lock(global_lock);
    skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
    lock.unlock();
    if (!skip_call) {
        get_dispatch_table(ot_device_table_map, commandBuffer)->CmdDebugMarkerEndEXT(commandBuffer);
    }
}

VKAPI_ATTR void VKAPI_CALL CmdDebugMarkerInsertEXT(VkCommandBuffer commandBuffer, VkDebugMarkerMarkerInfoEXT *pMarkerInfo) {
    bool skip_call = false;
    std::unique_lock<std::mutex> lock(global_lock);
    skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
    lock.unlock();
    if (!skip_call) {
        get_dispatch_table(ot_device_table_map, commandBuffer)->CmdDebugMarkerInsertEXT(commandBuffer, pMarkerInfo);
    }
}

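/*
 * Illustrative sketch (not part of this layer): the VK_EXT_debug_marker entry points above are only
 * reached when the extension is enabled on the device. An application-side use, with placeholder
 * device / command_buffer handles and assuming VK_EXT_DEBUG_MARKER_EXTENSION_NAME was enabled at
 * device creation, might look like:
 *
 *     auto pfnCmdDebugMarkerBeginEXT = reinterpret_cast<PFN_vkCmdDebugMarkerBeginEXT>(
 *         vkGetDeviceProcAddr(device, "vkCmdDebugMarkerBeginEXT"));
 *     auto pfnCmdDebugMarkerEndEXT = reinterpret_cast<PFN_vkCmdDebugMarkerEndEXT>(
 *         vkGetDeviceProcAddr(device, "vkCmdDebugMarkerEndEXT"));
 *     VkDebugMarkerMarkerInfoEXT marker_info = {};
 *     marker_info.sType = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT;
 *     marker_info.pMarkerName = "Shadow pass";
 *     pfnCmdDebugMarkerBeginEXT(command_buffer, &marker_info);
 *     // ... record the commands belonging to the marked region ...
 *     pfnCmdDebugMarkerEndEXT(command_buffer);
 */
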
// VK_NV_external_memory_capabilities Extension
VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceExternalImageFormatPropertiesNV(
    VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage,
    VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType,
    VkExternalImageFormatPropertiesNV *pExternalImageFormatProperties) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_instance_table_map, physicalDevice)
                          ->GetPhysicalDeviceExternalImageFormatPropertiesNV(physicalDevice, format, type, tiling, usage, flags,
                                                                             externalHandleType, pExternalImageFormatProperties);
    return result;
}

#ifdef VK_USE_PLATFORM_WIN32_KHR
// VK_NV_external_memory_win32 Extension
VKAPI_ATTR VkResult VKAPI_CALL GetMemoryWin32HandleNV(VkDevice device, VkDeviceMemory memory,
                                                      VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE *pHandle) {
    bool skip_call = false;
    std::unique_lock<std::mutex> lock(global_lock);
    skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    skip_call |= ValidateObject(device, memory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false);
    lock.unlock();
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->GetMemoryWin32HandleNV(device, memory, handleType, pHandle);
    return result;
}
#endif // VK_USE_PLATFORM_WIN32_KHR

// VK_AMD_draw_indirect_count Extension
VKAPI_ATTR void VKAPI_CALL CmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                   VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                                   uint32_t stride) {
    bool skip_call = false;
    std::unique_lock<std::mutex> lock(global_lock);
    // Validate the command buffer with its own object type (it is not a device handle)
    skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
    skip_call |= ValidateObject(commandBuffer, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
    lock.unlock();
    if (!skip_call) {
        get_dispatch_table(ot_device_table_map, commandBuffer)
            ->CmdDrawIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
    }
}

VKAPI_ATTR void VKAPI_CALL CmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                                                          VkBuffer countBuffer, VkDeviceSize countBufferOffset,
                                                          uint32_t maxDrawCount, uint32_t stride) {
    bool skip_call = false;
    std::unique_lock<std::mutex> lock(global_lock);
    skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
    skip_call |= ValidateObject(commandBuffer, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
    lock.unlock();
    if (!skip_call) {
        get_dispatch_table(ot_device_table_map, commandBuffer)
            ->CmdDrawIndexedIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
    }
}

static inline PFN_vkVoidFunction InterceptCoreDeviceCommand(const char *name) {
    if (!name || name[0] != 'v' || name[1] != 'k')
        return NULL;

    name += 2;
    if (!strcmp(name, "GetDeviceProcAddr"))
        return (PFN_vkVoidFunction)GetDeviceProcAddr;
    if (!strcmp(name, "DestroyDevice"))
        return (PFN_vkVoidFunction)DestroyDevice;
    if (!strcmp(name, "GetDeviceQueue"))
        return (PFN_vkVoidFunction)GetDeviceQueue;
    if (!strcmp(name, "QueueSubmit"))
        return (PFN_vkVoidFunction)QueueSubmit;
    if (!strcmp(name, "QueueWaitIdle"))
        return (PFN_vkVoidFunction)QueueWaitIdle;
    if (!strcmp(name, "DeviceWaitIdle"))
        return (PFN_vkVoidFunction)DeviceWaitIdle;
    if (!strcmp(name, "AllocateMemory"))
        return (PFN_vkVoidFunction)AllocateMemory;
    if (!strcmp(name, "FreeMemory"))
static inline PFN_vkVoidFunction InterceptCoreDeviceCommand(const char *name) {
    if (!name || name[0] != 'v' || name[1] != 'k')
        return NULL;

    name += 2;
    if (!strcmp(name, "GetDeviceProcAddr"))
        return (PFN_vkVoidFunction)GetDeviceProcAddr;
    if (!strcmp(name, "DestroyDevice"))
        return (PFN_vkVoidFunction)DestroyDevice;
    if (!strcmp(name, "GetDeviceQueue"))
        return (PFN_vkVoidFunction)GetDeviceQueue;
    if (!strcmp(name, "QueueSubmit"))
        return (PFN_vkVoidFunction)QueueSubmit;
    if (!strcmp(name, "QueueWaitIdle"))
        return (PFN_vkVoidFunction)QueueWaitIdle;
    if (!strcmp(name, "DeviceWaitIdle"))
        return (PFN_vkVoidFunction)DeviceWaitIdle;
    if (!strcmp(name, "AllocateMemory"))
        return (PFN_vkVoidFunction)AllocateMemory;
    if (!strcmp(name, "FreeMemory"))
        return (PFN_vkVoidFunction)FreeMemory;
    if (!strcmp(name, "MapMemory"))
        return (PFN_vkVoidFunction)MapMemory;
    if (!strcmp(name, "UnmapMemory"))
        return (PFN_vkVoidFunction)UnmapMemory;
    if (!strcmp(name, "FlushMappedMemoryRanges"))
        return (PFN_vkVoidFunction)FlushMappedMemoryRanges;
    if (!strcmp(name, "InvalidateMappedMemoryRanges"))
        return (PFN_vkVoidFunction)InvalidateMappedMemoryRanges;
    if (!strcmp(name, "GetDeviceMemoryCommitment"))
        return (PFN_vkVoidFunction)GetDeviceMemoryCommitment;
    if (!strcmp(name, "BindBufferMemory"))
        return (PFN_vkVoidFunction)BindBufferMemory;
    if (!strcmp(name, "BindImageMemory"))
        return (PFN_vkVoidFunction)BindImageMemory;
    if (!strcmp(name, "GetBufferMemoryRequirements"))
        return (PFN_vkVoidFunction)GetBufferMemoryRequirements;
    if (!strcmp(name, "GetImageMemoryRequirements"))
        return (PFN_vkVoidFunction)GetImageMemoryRequirements;
    if (!strcmp(name, "GetImageSparseMemoryRequirements"))
        return (PFN_vkVoidFunction)GetImageSparseMemoryRequirements;
    if (!strcmp(name, "QueueBindSparse"))
        return (PFN_vkVoidFunction)QueueBindSparse;
    if (!strcmp(name, "CreateFence"))
        return (PFN_vkVoidFunction)CreateFence;
    if (!strcmp(name, "DestroyFence"))
        return (PFN_vkVoidFunction)DestroyFence;
    if (!strcmp(name, "ResetFences"))
        return (PFN_vkVoidFunction)ResetFences;
    if (!strcmp(name, "GetFenceStatus"))
        return (PFN_vkVoidFunction)GetFenceStatus;
    if (!strcmp(name, "WaitForFences"))
        return (PFN_vkVoidFunction)WaitForFences;
    if (!strcmp(name, "CreateSemaphore"))
        return (PFN_vkVoidFunction)CreateSemaphore;
    if (!strcmp(name, "DestroySemaphore"))
        return (PFN_vkVoidFunction)DestroySemaphore;
    if (!strcmp(name, "CreateEvent"))
        return (PFN_vkVoidFunction)CreateEvent;
    if (!strcmp(name, "DestroyEvent"))
        return (PFN_vkVoidFunction)DestroyEvent;
    if (!strcmp(name, "GetEventStatus"))
        return (PFN_vkVoidFunction)GetEventStatus;
    if (!strcmp(name, "SetEvent"))
        return (PFN_vkVoidFunction)SetEvent;
    if (!strcmp(name, "ResetEvent"))
        return (PFN_vkVoidFunction)ResetEvent;
    if (!strcmp(name, "CreateQueryPool"))
        return (PFN_vkVoidFunction)CreateQueryPool;
    if (!strcmp(name, "DestroyQueryPool"))
        return (PFN_vkVoidFunction)DestroyQueryPool;
    if (!strcmp(name, "GetQueryPoolResults"))
        return (PFN_vkVoidFunction)GetQueryPoolResults;
    if (!strcmp(name, "CreateBuffer"))
        return (PFN_vkVoidFunction)CreateBuffer;
    if (!strcmp(name, "DestroyBuffer"))
        return (PFN_vkVoidFunction)DestroyBuffer;
    if (!strcmp(name, "CreateBufferView"))
        return (PFN_vkVoidFunction)CreateBufferView;
    if (!strcmp(name, "DestroyBufferView"))
        return (PFN_vkVoidFunction)DestroyBufferView;
    if (!strcmp(name, "CreateImage"))
        return (PFN_vkVoidFunction)CreateImage;
    if (!strcmp(name, "DestroyImage"))
        return (PFN_vkVoidFunction)DestroyImage;
    if (!strcmp(name, "GetImageSubresourceLayout"))
        return (PFN_vkVoidFunction)GetImageSubresourceLayout;
    if (!strcmp(name, "CreateImageView"))
        return (PFN_vkVoidFunction)CreateImageView;
    if (!strcmp(name, "DestroyImageView"))
        return (PFN_vkVoidFunction)DestroyImageView;
    if (!strcmp(name, "CreateShaderModule"))
        return (PFN_vkVoidFunction)CreateShaderModule;
    if (!strcmp(name, "DestroyShaderModule"))
        return (PFN_vkVoidFunction)DestroyShaderModule;
    if (!strcmp(name, "CreatePipelineCache"))
        return (PFN_vkVoidFunction)CreatePipelineCache;
    if (!strcmp(name, "DestroyPipelineCache"))
        return (PFN_vkVoidFunction)DestroyPipelineCache;
    if (!strcmp(name, "GetPipelineCacheData"))
        return (PFN_vkVoidFunction)GetPipelineCacheData;
    if (!strcmp(name, "MergePipelineCaches"))
        return (PFN_vkVoidFunction)MergePipelineCaches;
    if (!strcmp(name, "CreateGraphicsPipelines"))
        return (PFN_vkVoidFunction)CreateGraphicsPipelines;
    if (!strcmp(name, "CreateComputePipelines"))
        return (PFN_vkVoidFunction)CreateComputePipelines;
    if (!strcmp(name, "DestroyPipeline"))
        return (PFN_vkVoidFunction)DestroyPipeline;
    if (!strcmp(name, "CreatePipelineLayout"))
        return (PFN_vkVoidFunction)CreatePipelineLayout;
    if (!strcmp(name, "DestroyPipelineLayout"))
        return (PFN_vkVoidFunction)DestroyPipelineLayout;
    if (!strcmp(name, "CreateSampler"))
        return (PFN_vkVoidFunction)CreateSampler;
    if (!strcmp(name, "DestroySampler"))
        return (PFN_vkVoidFunction)DestroySampler;
    if (!strcmp(name, "CreateDescriptorSetLayout"))
        return (PFN_vkVoidFunction)CreateDescriptorSetLayout;
    if (!strcmp(name, "DestroyDescriptorSetLayout"))
        return (PFN_vkVoidFunction)DestroyDescriptorSetLayout;
    if (!strcmp(name, "CreateDescriptorPool"))
        return (PFN_vkVoidFunction)CreateDescriptorPool;
    if (!strcmp(name, "DestroyDescriptorPool"))
        return (PFN_vkVoidFunction)DestroyDescriptorPool;
    if (!strcmp(name, "ResetDescriptorPool"))
        return (PFN_vkVoidFunction)ResetDescriptorPool;
    if (!strcmp(name, "AllocateDescriptorSets"))
        return (PFN_vkVoidFunction)AllocateDescriptorSets;
    if (!strcmp(name, "FreeDescriptorSets"))
        return (PFN_vkVoidFunction)FreeDescriptorSets;
    if (!strcmp(name, "UpdateDescriptorSets"))
        return (PFN_vkVoidFunction)UpdateDescriptorSets;
    if (!strcmp(name, "CreateFramebuffer"))
        return (PFN_vkVoidFunction)CreateFramebuffer;
    if (!strcmp(name, "DestroyFramebuffer"))
        return (PFN_vkVoidFunction)DestroyFramebuffer;
    if (!strcmp(name, "CreateRenderPass"))
        return (PFN_vkVoidFunction)CreateRenderPass;
    if (!strcmp(name, "DestroyRenderPass"))
        return (PFN_vkVoidFunction)DestroyRenderPass;
    if (!strcmp(name, "GetRenderAreaGranularity"))
        return (PFN_vkVoidFunction)GetRenderAreaGranularity;
    if (!strcmp(name, "CreateCommandPool"))
        return (PFN_vkVoidFunction)CreateCommandPool;
    if (!strcmp(name, "DestroyCommandPool"))
        return (PFN_vkVoidFunction)DestroyCommandPool;
    if (!strcmp(name, "ResetCommandPool"))
        return (PFN_vkVoidFunction)ResetCommandPool;
    if (!strcmp(name, "AllocateCommandBuffers"))
        return (PFN_vkVoidFunction)AllocateCommandBuffers;
    if (!strcmp(name, "FreeCommandBuffers"))
        return (PFN_vkVoidFunction)FreeCommandBuffers;
    if (!strcmp(name, "BeginCommandBuffer"))
        return (PFN_vkVoidFunction)BeginCommandBuffer;
    if (!strcmp(name, "EndCommandBuffer"))
        return (PFN_vkVoidFunction)EndCommandBuffer;
    if (!strcmp(name, "ResetCommandBuffer"))
        return (PFN_vkVoidFunction)ResetCommandBuffer;
    if (!strcmp(name, "CmdBindPipeline"))
        return (PFN_vkVoidFunction)CmdBindPipeline;
    if (!strcmp(name, "CmdSetViewport"))
        return (PFN_vkVoidFunction)CmdSetViewport;
    if (!strcmp(name, "CmdSetScissor"))
        return (PFN_vkVoidFunction)CmdSetScissor;
    if (!strcmp(name, "CmdSetLineWidth"))
        return (PFN_vkVoidFunction)CmdSetLineWidth;
    if (!strcmp(name, "CmdSetDepthBias"))
        return (PFN_vkVoidFunction)CmdSetDepthBias;
    if (!strcmp(name, "CmdSetBlendConstants"))
        return (PFN_vkVoidFunction)CmdSetBlendConstants;
    if (!strcmp(name, "CmdSetDepthBounds"))
        return (PFN_vkVoidFunction)CmdSetDepthBounds;
    if (!strcmp(name, "CmdSetStencilCompareMask"))
        return (PFN_vkVoidFunction)CmdSetStencilCompareMask;
    if (!strcmp(name, "CmdSetStencilWriteMask"))
        return (PFN_vkVoidFunction)CmdSetStencilWriteMask;
    if (!strcmp(name, "CmdSetStencilReference"))
        return (PFN_vkVoidFunction)CmdSetStencilReference;
    if (!strcmp(name, "CmdBindDescriptorSets"))
        return (PFN_vkVoidFunction)CmdBindDescriptorSets;
    if (!strcmp(name, "CmdBindIndexBuffer"))
        return (PFN_vkVoidFunction)CmdBindIndexBuffer;
    if (!strcmp(name, "CmdBindVertexBuffers"))
        return (PFN_vkVoidFunction)CmdBindVertexBuffers;
    if (!strcmp(name, "CmdDraw"))
        return (PFN_vkVoidFunction)CmdDraw;
    if (!strcmp(name, "CmdDrawIndexed"))
        return (PFN_vkVoidFunction)CmdDrawIndexed;
    if (!strcmp(name, "CmdDrawIndirect"))
        return (PFN_vkVoidFunction)CmdDrawIndirect;
    if (!strcmp(name, "CmdDrawIndexedIndirect"))
        return (PFN_vkVoidFunction)CmdDrawIndexedIndirect;
    if (!strcmp(name, "CmdDispatch"))
        return (PFN_vkVoidFunction)CmdDispatch;
    if (!strcmp(name, "CmdDispatchIndirect"))
        return (PFN_vkVoidFunction)CmdDispatchIndirect;
    if (!strcmp(name, "CmdCopyBuffer"))
        return (PFN_vkVoidFunction)CmdCopyBuffer;
    if (!strcmp(name, "CmdCopyImage"))
        return (PFN_vkVoidFunction)CmdCopyImage;
    if (!strcmp(name, "CmdBlitImage"))
        return (PFN_vkVoidFunction)CmdBlitImage;
    if (!strcmp(name, "CmdCopyBufferToImage"))
        return (PFN_vkVoidFunction)CmdCopyBufferToImage;
    if (!strcmp(name, "CmdCopyImageToBuffer"))
        return (PFN_vkVoidFunction)CmdCopyImageToBuffer;
    if (!strcmp(name, "CmdUpdateBuffer"))
        return (PFN_vkVoidFunction)CmdUpdateBuffer;
    if (!strcmp(name, "CmdFillBuffer"))
        return (PFN_vkVoidFunction)CmdFillBuffer;
    if (!strcmp(name, "CmdClearColorImage"))
        return (PFN_vkVoidFunction)CmdClearColorImage;
    if (!strcmp(name, "CmdClearDepthStencilImage"))
        return (PFN_vkVoidFunction)CmdClearDepthStencilImage;
    if (!strcmp(name, "CmdClearAttachments"))
        return (PFN_vkVoidFunction)CmdClearAttachments;
    if (!strcmp(name, "CmdResolveImage"))
        return (PFN_vkVoidFunction)CmdResolveImage;
    if (!strcmp(name, "CmdSetEvent"))
        return (PFN_vkVoidFunction)CmdSetEvent;
    if (!strcmp(name, "CmdResetEvent"))
        return (PFN_vkVoidFunction)CmdResetEvent;
    if (!strcmp(name, "CmdWaitEvents"))
        return (PFN_vkVoidFunction)CmdWaitEvents;
    if (!strcmp(name, "CmdPipelineBarrier"))
        return (PFN_vkVoidFunction)CmdPipelineBarrier;
    if (!strcmp(name, "CmdBeginQuery"))
        return (PFN_vkVoidFunction)CmdBeginQuery;
    if (!strcmp(name, "CmdEndQuery"))
        return (PFN_vkVoidFunction)CmdEndQuery;
    if (!strcmp(name, "CmdResetQueryPool"))
        return (PFN_vkVoidFunction)CmdResetQueryPool;
    if (!strcmp(name, "CmdWriteTimestamp"))
        return (PFN_vkVoidFunction)CmdWriteTimestamp;
    if (!strcmp(name, "CmdCopyQueryPoolResults"))
        return (PFN_vkVoidFunction)CmdCopyQueryPoolResults;
    if (!strcmp(name, "CmdPushConstants"))
        return (PFN_vkVoidFunction)CmdPushConstants;
    if (!strcmp(name, "CmdBeginRenderPass"))
        return (PFN_vkVoidFunction)CmdBeginRenderPass;
    if (!strcmp(name, "CmdNextSubpass"))
        return (PFN_vkVoidFunction)CmdNextSubpass;
    if (!strcmp(name, "CmdEndRenderPass"))
        return (PFN_vkVoidFunction)CmdEndRenderPass;
    if (!strcmp(name, "CmdExecuteCommands"))
        return (PFN_vkVoidFunction)CmdExecuteCommands;
    if (!strcmp(name, "DebugMarkerSetObjectTagEXT"))
        return (PFN_vkVoidFunction)DebugMarkerSetObjectTagEXT;
    if (!strcmp(name, "DebugMarkerSetObjectNameEXT"))
        return (PFN_vkVoidFunction)DebugMarkerSetObjectNameEXT;
    if (!strcmp(name, "CmdDebugMarkerBeginEXT"))
        return (PFN_vkVoidFunction)CmdDebugMarkerBeginEXT;
    if (!strcmp(name, "CmdDebugMarkerEndEXT"))
        return (PFN_vkVoidFunction)CmdDebugMarkerEndEXT;
    if (!strcmp(name, "CmdDebugMarkerInsertEXT"))
        return (PFN_vkVoidFunction)CmdDebugMarkerInsertEXT;
#ifdef VK_USE_PLATFORM_WIN32_KHR
    if (!strcmp(name, "GetMemoryWin32HandleNV"))
        return (PFN_vkVoidFunction)GetMemoryWin32HandleNV;
#endif // VK_USE_PLATFORM_WIN32_KHR
    if (!strcmp(name, "CmdDrawIndirectCountAMD"))
        return (PFN_vkVoidFunction)CmdDrawIndirectCountAMD;
    if (!strcmp(name, "CmdDrawIndexedIndirectCountAMD"))
        return (PFN_vkVoidFunction)CmdDrawIndexedIndirectCountAMD;

    return NULL;
}

static inline PFN_vkVoidFunction InterceptCoreInstanceCommand(const char *name) {
    if (!name || name[0] != 'v' || name[1] != 'k')
        return NULL;

    name += 2;
    if (!strcmp(name, "CreateInstance"))
        return (PFN_vkVoidFunction)CreateInstance;
    if (!strcmp(name, "DestroyInstance"))
        return (PFN_vkVoidFunction)DestroyInstance;
    if (!strcmp(name, "EnumeratePhysicalDevices"))
        return (PFN_vkVoidFunction)EnumeratePhysicalDevices;
    if (!strcmp(name, "GetPhysicalDeviceFeatures"))
        return (PFN_vkVoidFunction)GetPhysicalDeviceFeatures;
    if (!strcmp(name, "GetPhysicalDeviceFormatProperties"))
        return (PFN_vkVoidFunction)GetPhysicalDeviceFormatProperties;
    if (!strcmp(name, "GetPhysicalDeviceImageFormatProperties"))
        return (PFN_vkVoidFunction)GetPhysicalDeviceImageFormatProperties;
    if (!strcmp(name, "GetPhysicalDeviceProperties"))
        return (PFN_vkVoidFunction)GetPhysicalDeviceProperties;
    if (!strcmp(name, "GetPhysicalDeviceQueueFamilyProperties"))
        return (PFN_vkVoidFunction)GetPhysicalDeviceQueueFamilyProperties;
    if (!strcmp(name, "GetPhysicalDeviceMemoryProperties"))
        return (PFN_vkVoidFunction)GetPhysicalDeviceMemoryProperties;
    if (!strcmp(name, "GetInstanceProcAddr"))
        return (PFN_vkVoidFunction)GetInstanceProcAddr;
    if (!strcmp(name, "CreateDevice"))
        return (PFN_vkVoidFunction)CreateDevice;
    if (!strcmp(name, "EnumerateInstanceExtensionProperties"))
        return (PFN_vkVoidFunction)EnumerateInstanceExtensionProperties;
    if (!strcmp(name, "EnumerateInstanceLayerProperties"))
        return (PFN_vkVoidFunction)EnumerateInstanceLayerProperties;
    if (!strcmp(name, "EnumerateDeviceLayerProperties"))
        return (PFN_vkVoidFunction)EnumerateDeviceLayerProperties;
    if (!strcmp(name, "GetPhysicalDeviceSparseImageFormatProperties"))
        return (PFN_vkVoidFunction)GetPhysicalDeviceSparseImageFormatProperties;
    if (!strcmp(name, "GetPhysicalDeviceExternalImageFormatPropertiesNV"))
        return (PFN_vkVoidFunction)GetPhysicalDeviceExternalImageFormatPropertiesNV;

    return NULL;
}

// Return WSI (swapchain) intercepts only when the corresponding extensions were enabled at
// device-creation time; otherwise return nullptr so this layer does not claim the entry point.
static inline PFN_vkVoidFunction InterceptWsiEnabledCommand(const char *name, VkDevice device) {
    if (device) {
        layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);

        if (device_data->wsi_enabled) {
            if (!strcmp("vkCreateSwapchainKHR", name))
                return reinterpret_cast<PFN_vkVoidFunction>(CreateSwapchainKHR);
            if (!strcmp("vkDestroySwapchainKHR", name))
                return reinterpret_cast<PFN_vkVoidFunction>(DestroySwapchainKHR);
            if (!strcmp("vkGetSwapchainImagesKHR", name))
                return reinterpret_cast<PFN_vkVoidFunction>(GetSwapchainImagesKHR);
            if (!strcmp("vkAcquireNextImageKHR", name))
                return reinterpret_cast<PFN_vkVoidFunction>(AcquireNextImageKHR);
            if (!strcmp("vkQueuePresentKHR", name))
                return reinterpret_cast<PFN_vkVoidFunction>(QueuePresentKHR);
        }

        if (device_data->wsi_display_swapchain_enabled) {
            if (!strcmp("vkCreateSharedSwapchainsKHR", name)) {
                return reinterpret_cast<PFN_vkVoidFunction>(CreateSharedSwapchainsKHR);
            }
        }
    }

    return nullptr;
}

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(VkDevice device, const char *funcName) {
    PFN_vkVoidFunction addr;
    addr = InterceptCoreDeviceCommand(funcName);
    if (addr) {
        return addr;
    }
    assert(device);

    addr = InterceptWsiEnabledCommand(funcName, device);
    if (addr) {
        return addr;
    }
    if (get_dispatch_table(ot_device_table_map, device)->GetDeviceProcAddr == NULL) {
        return NULL;
    }
    return get_dispatch_table(ot_device_table_map, device)->GetDeviceProcAddr(device, funcName);
}

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(VkInstance instance, const char *funcName) {
    PFN_vkVoidFunction addr;
    addr = InterceptCoreInstanceCommand(funcName);
    if (!addr) {
        addr = InterceptCoreDeviceCommand(funcName);
    }
    if (!addr) {
        addr = InterceptWsiEnabledCommand(funcName, VkDevice(VK_NULL_HANDLE));
    }
    if (addr) {
        return addr;
    }
    assert(instance);

    addr = InterceptMsgCallbackGetProcAddrCommand(funcName, instance);
    if (addr) {
        return addr;
    }
    addr = InterceptWsiEnabledCommand(funcName, instance);
    if (addr) {
        return addr;
    }
    if (get_dispatch_table(ot_instance_table_map, instance)->GetInstanceProcAddr == NULL) {
        return NULL;
    }
    return get_dispatch_table(ot_instance_table_map, instance)->GetInstanceProcAddr(instance, funcName);
}

} // namespace object_tracker

// vk_layer_logging.h expects these to be defined
VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT(VkInstance instance,
                                                              const VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator,
                                                              VkDebugReportCallbackEXT *pMsgCallback) {
    return object_tracker::CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pMsgCallback);
}

VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT msgCallback,
                                                           const VkAllocationCallbacks *pAllocator) {
    object_tracker::DestroyDebugReportCallbackEXT(instance, msgCallback, pAllocator);
}

VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags,
                                                   VkDebugReportObjectTypeEXT objType, uint64_t object, size_t location,
                                                   int32_t msgCode, const char *pLayerPrefix, const char *pMsg) {
    object_tracker::DebugReportMessageEXT(instance, flags, objType, object, location, msgCode, pLayerPrefix, pMsg);
}

// Loader-layer interface v0, just wrappers since there is only a layer
VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
                                                                                      VkExtensionProperties *pProperties) {
    return object_tracker::EnumerateInstanceExtensionProperties(pLayerName, pCount, pProperties);
}

VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(uint32_t *pCount,
                                                                                  VkLayerProperties *pProperties) {
    return object_tracker::EnumerateInstanceLayerProperties(pCount, pProperties);
}

VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount,
                                                                                VkLayerProperties *pProperties) {
    // The layer command handles VK_NULL_HANDLE just fine internally
    assert(physicalDevice == VK_NULL_HANDLE);
    return object_tracker::EnumerateDeviceLayerProperties(VK_NULL_HANDLE, pCount, pProperties);
}

VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice dev, const char *funcName) {
    return object_tracker::GetDeviceProcAddr(dev, funcName);
}

VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char *funcName) {
    return object_tracker::GetInstanceProcAddr(instance, funcName);
}

VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
                                                                                    const char *pLayerName, uint32_t *pCount,
                                                                                    VkExtensionProperties *pProperties) {
    // The layer command handles VK_NULL_HANDLE just fine internally
    assert(physicalDevice == VK_NULL_HANDLE);
    return object_tracker::EnumerateDeviceExtensionProperties(VK_NULL_HANDLE, pLayerName, pCount, pProperties);
}
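
// Illustrative only (not part of this layer): a minimal sketch of how an application might
// enable the object tracker so the intercepts above take effect. It assumes the layer is
// discoverable through its JSON manifest under the name "VK_LAYER_LUNARG_object_tracker".
//
//     const char *layer_names[] = {"VK_LAYER_LUNARG_object_tracker"};
//     VkInstanceCreateInfo instance_info = {};
//     instance_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
//     instance_info.enabledLayerCount = 1;
//     instance_info.ppEnabledLayerNames = layer_names;
//     VkInstance instance = VK_NULL_HANDLE;
//     vkCreateInstance(&instance_info, nullptr, &instance);
//
// Once enabled, the loader routes each intercepted entry point through this layer's
// GetInstanceProcAddr/GetDeviceProcAddr before reaching the driver.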