unique_objects.h revision 26f7026b125facaeaaf27b377181e7fa5d6a1ba9
/* Copyright (c) 2015-2016 The Khronos Group Inc.
 * Copyright (c) 2015-2016 Valve Corporation
 * Copyright (c) 2015-2016 LunarG, Inc.
 * Copyright (C) 2015-2016 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Tobin Ehlis <tobine@google.com>
 */

#include "vk_loader_platform.h"
#include "vulkan/vulkan.h"

#include <cinttypes>
#include <memory>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include <unordered_map>
#include <vector>
#include <mutex>

#include "vulkan/vk_layer.h"
#include "vk_layer_config.h"
#include "vk_layer_table.h"
#include "vk_layer_data.h"
#include "vk_layer_logging.h"
#include "vk_layer_extension_utils.h"
#include "vk_safe_struct.h"
#include "vk_layer_utils.h"

namespace unique_objects {

// The display-server-specific WSI extensions are handled explicitly
static const char *kUniqueObjectsSupportedInstanceExtensions =
#ifdef VK_USE_PLATFORM_XLIB_KHR
    VK_KHR_XLIB_SURFACE_EXTENSION_NAME
#endif
#ifdef VK_USE_PLATFORM_XCB_KHR
    VK_KHR_XCB_SURFACE_EXTENSION_NAME
#endif
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
    VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME
#endif
#ifdef VK_USE_PLATFORM_MIR_KHR
    VK_KHR_MIR_SURFACE_EXTENSION_NAME
#endif
#ifdef VK_USE_PLATFORM_ANDROID_KHR
    VK_KHR_ANDROID_SURFACE_EXTENSION_NAME
#endif
#ifdef VK_USE_PLATFORM_WIN32_KHR
    VK_KHR_WIN32_SURFACE_EXTENSION_NAME
#endif
    VK_EXT_DEBUG_MARKER_EXTENSION_NAME
    VK_EXT_DEBUG_REPORT_EXTENSION_NAME
    VK_KHR_DISPLAY_EXTENSION_NAME
    VK_KHR_SURFACE_EXTENSION_NAME;

static const char *kUniqueObjectsSupportedDeviceExtensions =
    VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME
    VK_AMD_SHADER_TRINARY_MINMAX_EXTENSION_NAME
    VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME
    VK_AMD_GCN_SHADER_EXTENSION_NAME
    VK_IMG_FILTER_CUBIC_EXTENSION_NAME
    VK_IMG_FORMAT_PVRTC_EXTENSION_NAME
    VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME
    VK_KHR_SWAPCHAIN_EXTENSION_NAME
    VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME
    VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME
    VK_NV_GLSL_SHADER_EXTENSION_NAME;
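
// Note: these whitelists are consulted via white_list() at vkCreateInstance/vkCreateDevice
// time; an enabled extension name that is not found here is reported through the layer's
// debug-report callback as unsupported (see checkInstanceRegisterExtensions and
// createDeviceRegisterExtensions below).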

// All increments must be guarded by global_lock
static uint64_t global_unique_id = 1;

struct layer_data {
    VkInstance instance;

    debug_report_data *report_data;
    std::vector<VkDebugReportCallbackEXT> logging_callback;

    // The following are for keeping track of the temporary callbacks that can
    // be used in vkCreateInstance and vkDestroyInstance:
    uint32_t num_tmp_callbacks;
    VkDebugReportCallbackCreateInfoEXT *tmp_dbg_create_infos;
    VkDebugReportCallbackEXT *tmp_callbacks;

    bool wsi_enabled;
    std::unordered_map<uint64_t, uint64_t> unique_id_mapping; // Map uniqueID to actual object handle
    VkPhysicalDevice gpu;

    layer_data()
        : instance(VK_NULL_HANDLE), report_data(nullptr), num_tmp_callbacks(0), tmp_dbg_create_infos(nullptr),
          tmp_callbacks(nullptr), wsi_enabled(false), gpu(VK_NULL_HANDLE) {}
};
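
// How the wrapping scheme works (illustrative sketch of the pattern used by the wrappers
// below; dev_data and pHandle are placeholder names): every handle returned to the
// application is replaced with a freshly allocated unique ID, and unique_id_mapping
// translates that ID back to the driver's handle before calls are dispatched down the
// chain. The create-time half of the pattern looks like:
//
//     std::lock_guard<std::mutex> lock(global_lock);
//     uint64_t unique_id = global_unique_id++;
//     dev_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pHandle);
//     *pHandle = reinterpret_cast<VkDeviceMemory &>(unique_id); // VkDeviceMemory stands in for any non-dispatchable type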

struct instance_extension_enables {
    bool wsi_enabled;
    bool xlib_enabled;
    bool xcb_enabled;
    bool wayland_enabled;
    bool mir_enabled;
    bool android_enabled;
    bool win32_enabled;
    bool display_enabled;
};

static std::unordered_map<void *, struct instance_extension_enables> instanceExtMap;
static std::unordered_map<void *, layer_data *> layer_data_map;
static device_table_map unique_objects_device_table_map;
static instance_table_map unique_objects_instance_table_map;
static std::mutex global_lock; // Protect map accesses and unique_id increments

struct GenericHeader {
    VkStructureType sType;
    void *pNext;
};

template <typename T> bool ContainsExtStruct(const T *target, VkStructureType ext_type) {
    assert(target != nullptr);

    const GenericHeader *ext_struct = reinterpret_cast<const GenericHeader *>(target->pNext);

    while (ext_struct != nullptr) {
        if (ext_struct->sType == ext_type) {
            return true;
        }

        ext_struct = reinterpret_cast<const GenericHeader *>(ext_struct->pNext);
    }

    return false;
}
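
// Example use of ContainsExtStruct, as in explicit_AllocateMemory below -- walk a
// create-info's pNext chain for a known extension structure before deciding whether a
// deep copy of the chain is required:
//
//     if (ContainsExtStruct(pAllocateInfo, VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV)) {
//         // the chain carries a VkDedicatedAllocationMemoryAllocateInfoNV whose handles must be unwrapped
//     }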

static void init_unique_objects(layer_data *my_data, const VkAllocationCallbacks *pAllocator) {
    layer_debug_actions(my_data->report_data, my_data->logging_callback, pAllocator, "google_unique_objects");
}

// Handle CreateInstance
static void checkInstanceRegisterExtensions(const VkInstanceCreateInfo *pCreateInfo, VkInstance instance) {
    VkLayerInstanceDispatchTable *pDisp = get_dispatch_table(unique_objects_instance_table_map, instance);
    layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);

    instanceExtMap[pDisp] = {};

    for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SURFACE_EXTENSION_NAME) == 0) {
            instanceExtMap[pDisp].wsi_enabled = true;
        }
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_DISPLAY_EXTENSION_NAME) == 0) {
            instanceExtMap[pDisp].display_enabled = true;
        }
#ifdef VK_USE_PLATFORM_XLIB_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XLIB_SURFACE_EXTENSION_NAME) == 0) {
            instanceExtMap[pDisp].xlib_enabled = true;
        }
#endif
#ifdef VK_USE_PLATFORM_XCB_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XCB_SURFACE_EXTENSION_NAME) == 0) {
            instanceExtMap[pDisp].xcb_enabled = true;
        }
#endif
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME) == 0) {
            instanceExtMap[pDisp].wayland_enabled = true;
        }
#endif
#ifdef VK_USE_PLATFORM_MIR_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_MIR_SURFACE_EXTENSION_NAME) == 0) {
            instanceExtMap[pDisp].mir_enabled = true;
        }
#endif
#ifdef VK_USE_PLATFORM_ANDROID_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_ANDROID_SURFACE_EXTENSION_NAME) == 0) {
            instanceExtMap[pDisp].android_enabled = true;
        }
#endif
#ifdef VK_USE_PLATFORM_WIN32_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WIN32_SURFACE_EXTENSION_NAME) == 0) {
            instanceExtMap[pDisp].win32_enabled = true;
        }
#endif

        // Check for recognized instance extensions
        if (!white_list(pCreateInfo->ppEnabledExtensionNames[i], kUniqueObjectsSupportedInstanceExtensions)) {
            log_msg(instance_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                    0, "UniqueObjects",
                    "Instance Extension %s is not supported by this layer.  Using this extension may adversely affect "
                    "validation results and/or produce undefined behavior.",
                    pCreateInfo->ppEnabledExtensionNames[i]);
        }
    }
}

VkResult explicit_CreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
                                 VkInstance *pInstance) {
    VkLayerInstanceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);

    assert(chain_info->u.pLayerInfo);
    PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
    PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance)fpGetInstanceProcAddr(NULL, "vkCreateInstance");
    if (fpCreateInstance == NULL) {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    // Advance the link info for the next element on the chain
    chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;

    VkResult result = fpCreateInstance(pCreateInfo, pAllocator, pInstance);
    if (result != VK_SUCCESS) {
        return result;
    }

    layer_data *my_data = get_my_data_ptr(get_dispatch_key(*pInstance), layer_data_map);
    my_data->instance = *pInstance;
    VkLayerInstanceDispatchTable *pTable = initInstanceTable(*pInstance, fpGetInstanceProcAddr, unique_objects_instance_table_map);

    my_data->report_data = debug_report_create_instance(pTable, *pInstance, pCreateInfo->enabledExtensionCount,
                                                        pCreateInfo->ppEnabledExtensionNames);

    // Set up temporary debug callbacks to output messages at CreateInstance-time
    if (!layer_copy_tmp_callbacks(pCreateInfo->pNext, &my_data->num_tmp_callbacks, &my_data->tmp_dbg_create_infos,
                                  &my_data->tmp_callbacks)) {
        if (my_data->num_tmp_callbacks > 0) {
            if (layer_enable_tmp_callbacks(my_data->report_data, my_data->num_tmp_callbacks, my_data->tmp_dbg_create_infos,
                                           my_data->tmp_callbacks)) {
                layer_free_tmp_callbacks(my_data->tmp_dbg_create_infos, my_data->tmp_callbacks);
                my_data->num_tmp_callbacks = 0;
            }
        }
    }

    init_unique_objects(my_data, pAllocator);
    checkInstanceRegisterExtensions(pCreateInfo, *pInstance);

    // Disable and free tmp callbacks, no longer necessary
    if (my_data->num_tmp_callbacks > 0) {
        layer_disable_tmp_callbacks(my_data->report_data, my_data->num_tmp_callbacks, my_data->tmp_callbacks);
        layer_free_tmp_callbacks(my_data->tmp_dbg_create_infos, my_data->tmp_callbacks);
        my_data->num_tmp_callbacks = 0;
    }

    return result;
}

void explicit_DestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator) {
    dispatch_key key = get_dispatch_key(instance);
    layer_data *my_data = get_my_data_ptr(key, layer_data_map);
    VkLayerInstanceDispatchTable *pDisp = get_dispatch_table(unique_objects_instance_table_map, instance);
    instanceExtMap.erase(pDisp);
    pDisp->DestroyInstance(instance, pAllocator);

    // Clean up logging callback, if any
    while (my_data->logging_callback.size() > 0) {
        VkDebugReportCallbackEXT callback = my_data->logging_callback.back();
        layer_destroy_msg_callback(my_data->report_data, callback, pAllocator);
        my_data->logging_callback.pop_back();
    }

    layer_debug_report_destroy_instance(my_data->report_data);
    layer_data_map.erase(key);
}

// Handle CreateDevice
static void createDeviceRegisterExtensions(const VkDeviceCreateInfo *pCreateInfo, VkDevice device) {
    layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    my_device_data->wsi_enabled = false;

    for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SWAPCHAIN_EXTENSION_NAME) == 0) {
            my_device_data->wsi_enabled = true;
        }
        // Check for recognized device extensions
        if (!white_list(pCreateInfo->ppEnabledExtensionNames[i], kUniqueObjectsSupportedDeviceExtensions)) {
            log_msg(my_device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                    __LINE__, 0, "UniqueObjects",
                    "Device Extension %s is not supported by this layer.  Using this extension may adversely affect "
                    "validation results and/or produce undefined behavior.",
                    pCreateInfo->ppEnabledExtensionNames[i]);
        }
    }
}

VkResult explicit_CreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
                               VkDevice *pDevice) {
    layer_data *my_instance_data = get_my_data_ptr(get_dispatch_key(gpu), layer_data_map);
    VkLayerDeviceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);

    assert(chain_info->u.pLayerInfo);
    PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
    PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr = chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
    PFN_vkCreateDevice fpCreateDevice = (PFN_vkCreateDevice)fpGetInstanceProcAddr(my_instance_data->instance, "vkCreateDevice");
    if (fpCreateDevice == NULL) {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    // Advance the link info for the next element on the chain
    chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;

    VkResult result = fpCreateDevice(gpu, pCreateInfo, pAllocator, pDevice);
    if (result != VK_SUCCESS) {
        return result;
    }

    layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(*pDevice), layer_data_map);
    my_device_data->report_data = layer_debug_report_create_device(my_instance_data->report_data, *pDevice);

    // Setup layer's device dispatch table
    initDeviceTable(*pDevice, fpGetDeviceProcAddr, unique_objects_device_table_map);

    createDeviceRegisterExtensions(pCreateInfo, *pDevice);

    // Set gpu for this device in order to get at any objects mapped at instance level
    my_device_data->gpu = gpu;

    return result;
}

void explicit_DestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
    dispatch_key key = get_dispatch_key(device);
    layer_debug_report_destroy_device(device);
    get_dispatch_table(unique_objects_device_table_map, device)->DestroyDevice(device, pAllocator);
    layer_data_map.erase(key);
}

VkResult explicit_AllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
                                 const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory) {
    const VkMemoryAllocateInfo *input_allocate_info = pAllocateInfo;
    std::unique_ptr<safe_VkMemoryAllocateInfo> safe_allocate_info;
    std::unique_ptr<safe_VkDedicatedAllocationMemoryAllocateInfoNV> safe_dedicated_allocate_info;
    layer_data *my_map_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);

    if ((pAllocateInfo != nullptr) &&
        ContainsExtStruct(pAllocateInfo, VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV)) {
        // Assuming there is only one extension struct of this type in the list for now
        safe_dedicated_allocate_info =
            std::unique_ptr<safe_VkDedicatedAllocationMemoryAllocateInfoNV>(new safe_VkDedicatedAllocationMemoryAllocateInfoNV);
        safe_allocate_info = std::unique_ptr<safe_VkMemoryAllocateInfo>(new safe_VkMemoryAllocateInfo);

        safe_allocate_info->initialize(pAllocateInfo);
        input_allocate_info = reinterpret_cast<const VkMemoryAllocateInfo *>(safe_allocate_info.get());

        const GenericHeader *orig_pnext = reinterpret_cast<const GenericHeader *>(pAllocateInfo->pNext);
        GenericHeader *input_pnext = reinterpret_cast<GenericHeader *>(safe_allocate_info.get());
        while (orig_pnext != nullptr) {
            if (orig_pnext->sType == VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV) {
                safe_dedicated_allocate_info->initialize(
                    reinterpret_cast<const VkDedicatedAllocationMemoryAllocateInfoNV *>(orig_pnext));

                std::unique_lock<std::mutex> lock(global_lock);

                if (safe_dedicated_allocate_info->buffer != VK_NULL_HANDLE) {
                    uint64_t local_buffer = reinterpret_cast<uint64_t &>(safe_dedicated_allocate_info->buffer);
                    safe_dedicated_allocate_info->buffer =
                        reinterpret_cast<VkBuffer &>(my_map_data->unique_id_mapping[local_buffer]);
                }

                if (safe_dedicated_allocate_info->image != VK_NULL_HANDLE) {
                    uint64_t local_image = reinterpret_cast<uint64_t &>(safe_dedicated_allocate_info->image);
                    safe_dedicated_allocate_info->image = reinterpret_cast<VkImage &>(my_map_data->unique_id_mapping[local_image]);
                }

                lock.unlock();

                input_pnext->pNext = reinterpret_cast<GenericHeader *>(safe_dedicated_allocate_info.get());
                input_pnext = reinterpret_cast<GenericHeader *>(input_pnext->pNext);
            } else {
                // TODO: generic handling of pNext copies
            }

            orig_pnext = reinterpret_cast<const GenericHeader *>(orig_pnext->pNext);
        }
    }

    VkResult result = get_dispatch_table(unique_objects_device_table_map, device)
                          ->AllocateMemory(device, input_allocate_info, pAllocator, pMemory);

    if (VK_SUCCESS == result) {
        std::lock_guard<std::mutex> lock(global_lock);
        uint64_t unique_id = global_unique_id++;
        my_map_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pMemory);
        *pMemory = reinterpret_cast<VkDeviceMemory &>(unique_id);
    }

    return result;
}
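
// The inverse of the wrap above: handles that arrive from the application, either as
// direct parameters or embedded in create-info structures, are translated back to the
// driver's handles through unique_id_mapping before dispatching down the chain, e.g.
// (sketch of the pattern used by the wrappers below; dev_data is a placeholder name):
//
//     std::lock_guard<std::mutex> lock(global_lock);
//     swapchain = (VkSwapchainKHR)dev_data->unique_id_mapping[reinterpret_cast<uint64_t &>(swapchain)];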

VkResult explicit_CreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
                                         const VkComputePipelineCreateInfo *pCreateInfos, const VkAllocationCallbacks *pAllocator,
                                         VkPipeline *pPipelines) {
    // STRUCT USES:{'pipelineCache': 'VkPipelineCache', 'pCreateInfos[createInfoCount]': {'stage': {'module': 'VkShaderModule'},
    // 'layout': 'VkPipelineLayout', 'basePipelineHandle': 'VkPipeline'}}
    // LOCAL DECLS:{'pCreateInfos': 'VkComputePipelineCreateInfo*'}
    layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    safe_VkComputePipelineCreateInfo *local_pCreateInfos = NULL;
    if (pCreateInfos) {
        std::lock_guard<std::mutex> lock(global_lock);
        local_pCreateInfos = new safe_VkComputePipelineCreateInfo[createInfoCount];
        for (uint32_t idx0 = 0; idx0 < createInfoCount; ++idx0) {
            local_pCreateInfos[idx0].initialize(&pCreateInfos[idx0]);
            if (pCreateInfos[idx0].basePipelineHandle) {
                local_pCreateInfos[idx0].basePipelineHandle =
                    (VkPipeline)my_device_data
                        ->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfos[idx0].basePipelineHandle)];
            }
            if (pCreateInfos[idx0].layout) {
                local_pCreateInfos[idx0].layout =
                    (VkPipelineLayout)
                        my_device_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfos[idx0].layout)];
            }
            if (pCreateInfos[idx0].stage.module) {
                local_pCreateInfos[idx0].stage.module =
                    (VkShaderModule)
                        my_device_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfos[idx0].stage.module)];
            }
        }
    }
    if (pipelineCache) {
        std::lock_guard<std::mutex> lock(global_lock);
        pipelineCache = (VkPipelineCache)my_device_data->unique_id_mapping[reinterpret_cast<uint64_t &>(pipelineCache)];
    }

    VkResult result = get_dispatch_table(unique_objects_device_table_map, device)
                          ->CreateComputePipelines(device, pipelineCache, createInfoCount,
                                                   (const VkComputePipelineCreateInfo *)local_pCreateInfos, pAllocator, pPipelines);
    delete[] local_pCreateInfos;
    if (VK_SUCCESS == result) {
        uint64_t unique_id = 0;
        std::lock_guard<std::mutex> lock(global_lock);
        for (uint32_t i = 0; i < createInfoCount; ++i) {
            unique_id = global_unique_id++;
            my_device_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(pPipelines[i]);
            pPipelines[i] = reinterpret_cast<VkPipeline &>(unique_id);
        }
    }
    return result;
}
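
// Because the application's pCreateInfos array is const, the wrapped handles it contains
// (layout, stage.module, basePipelineHandle, renderPass, etc.) cannot be rewritten in
// place; the wrappers above and below therefore deep-copy each create info into a
// safe_Vk*CreateInfo, substitute the driver handles in the copy, and pass the copy down.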

VkResult explicit_CreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
                                          const VkGraphicsPipelineCreateInfo *pCreateInfos, const VkAllocationCallbacks *pAllocator,
                                          VkPipeline *pPipelines) {
    // STRUCT USES:{'pipelineCache': 'VkPipelineCache', 'pCreateInfos[createInfoCount]': {'layout': 'VkPipelineLayout',
    // 'pStages[stageCount]': {'module': 'VkShaderModule'}, 'renderPass': 'VkRenderPass', 'basePipelineHandle': 'VkPipeline'}}
    // LOCAL DECLS:{'pCreateInfos': 'VkGraphicsPipelineCreateInfo*'}
    layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    safe_VkGraphicsPipelineCreateInfo *local_pCreateInfos = NULL;
    if (pCreateInfos) {
        local_pCreateInfos = new safe_VkGraphicsPipelineCreateInfo[createInfoCount];
        std::lock_guard<std::mutex> lock(global_lock);
        for (uint32_t idx0 = 0; idx0 < createInfoCount; ++idx0) {
            local_pCreateInfos[idx0].initialize(&pCreateInfos[idx0]);
            if (pCreateInfos[idx0].basePipelineHandle) {
                local_pCreateInfos[idx0].basePipelineHandle =
                    (VkPipeline)my_device_data
                        ->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfos[idx0].basePipelineHandle)];
            }
            if (pCreateInfos[idx0].layout) {
                local_pCreateInfos[idx0].layout =
                    (VkPipelineLayout)
                        my_device_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfos[idx0].layout)];
            }
            if (pCreateInfos[idx0].pStages) {
                for (uint32_t idx1 = 0; idx1 < pCreateInfos[idx0].stageCount; ++idx1) {
                    if (pCreateInfos[idx0].pStages[idx1].module) {
                        local_pCreateInfos[idx0].pStages[idx1].module =
                            (VkShaderModule)my_device_data
                                ->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfos[idx0].pStages[idx1].module)];
                    }
                }
            }
            if (pCreateInfos[idx0].renderPass) {
                local_pCreateInfos[idx0].renderPass =
                    (VkRenderPass)
                        my_device_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfos[idx0].renderPass)];
            }
        }
    }
    if (pipelineCache) {
        std::lock_guard<std::mutex> lock(global_lock);
        pipelineCache = (VkPipelineCache)my_device_data->unique_id_mapping[reinterpret_cast<uint64_t &>(pipelineCache)];
    }

    VkResult result =
        get_dispatch_table(unique_objects_device_table_map, device)
            ->CreateGraphicsPipelines(device, pipelineCache, createInfoCount,
                                      (const VkGraphicsPipelineCreateInfo *)local_pCreateInfos, pAllocator, pPipelines);
    delete[] local_pCreateInfos;
    if (VK_SUCCESS == result) {
        uint64_t unique_id = 0;
        std::lock_guard<std::mutex> lock(global_lock);
        for (uint32_t i = 0; i < createInfoCount; ++i) {
            unique_id = global_unique_id++;
            my_device_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(pPipelines[i]);
            pPipelines[i] = reinterpret_cast<VkPipeline &>(unique_id);
        }
    }
    return result;
}

VkResult explicit_CreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
                                     const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain) {
    layer_data *my_map_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);

    safe_VkSwapchainCreateInfoKHR *local_pCreateInfo = NULL;
    if (pCreateInfo) {
        std::lock_guard<std::mutex> lock(global_lock);
        local_pCreateInfo = new safe_VkSwapchainCreateInfoKHR(pCreateInfo);
        local_pCreateInfo->oldSwapchain =
            (VkSwapchainKHR)my_map_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfo->oldSwapchain)];
        // Need to pull surface mapping from the instance-level map
        layer_data *instance_data = get_my_data_ptr(get_dispatch_key(my_map_data->gpu), layer_data_map);
        local_pCreateInfo->surface =
            (VkSurfaceKHR)instance_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfo->surface)];
    }

    VkResult result = get_dispatch_table(unique_objects_device_table_map, device)
                          ->CreateSwapchainKHR(device, (const VkSwapchainCreateInfoKHR *)local_pCreateInfo, pAllocator, pSwapchain);
    delete local_pCreateInfo;
    if (VK_SUCCESS == result) {
        std::lock_guard<std::mutex> lock(global_lock);
        uint64_t unique_id = global_unique_id++;
        my_map_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pSwapchain);
        *pSwapchain = reinterpret_cast<VkSwapchainKHR &>(unique_id);
    }
    return result;
}

VkResult explicit_GetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount,
                                        VkImage *pSwapchainImages) {
    // UNWRAP USES:
    //  0 : swapchain,VkSwapchainKHR, pSwapchainImages,VkImage
    layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    if (VK_NULL_HANDLE != swapchain) {
        std::lock_guard<std::mutex> lock(global_lock);
        swapchain = (VkSwapchainKHR)my_device_data->unique_id_mapping[reinterpret_cast<uint64_t &>(swapchain)];
    }
    VkResult result = get_dispatch_table(unique_objects_device_table_map, device)
                          ->GetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages);
    // TODO : Need to add corresponding code to delete these images
    if (VK_SUCCESS == result) {
        if ((*pSwapchainImageCount > 0) && pSwapchainImages) {
            uint64_t unique_id = 0;
            std::lock_guard<std::mutex> lock(global_lock);
            for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
                unique_id = global_unique_id++;
                my_device_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(pSwapchainImages[i]);
                pSwapchainImages[i] = reinterpret_cast<VkImage &>(unique_id);
            }
        }
    }
    return result;
}

#ifndef __ANDROID__
VkResult explicit_GetPhysicalDeviceDisplayPropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
                                                        VkDisplayPropertiesKHR *pProperties) {
    layer_data *my_map_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
    safe_VkDisplayPropertiesKHR *local_pProperties = NULL;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (pProperties) {
            local_pProperties = new safe_VkDisplayPropertiesKHR[*pPropertyCount];
            for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
                local_pProperties[idx0].initialize(&pProperties[idx0]);
                if (pProperties[idx0].display) {
                    local_pProperties[idx0].display =
                        (VkDisplayKHR)my_map_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pProperties[idx0].display)];
                }
            }
        }
    }

    VkResult result = get_dispatch_table(unique_objects_instance_table_map, physicalDevice)
                          ->GetPhysicalDeviceDisplayPropertiesKHR(physicalDevice, pPropertyCount,
                                                                  (VkDisplayPropertiesKHR *)local_pProperties);
    if (result == VK_SUCCESS && pProperties) {
        for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
            std::lock_guard<std::mutex> lock(global_lock);

            uint64_t unique_id = global_unique_id++;
            my_map_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(local_pProperties[idx0].display);
            pProperties[idx0].display = reinterpret_cast<VkDisplayKHR &>(unique_id);
            pProperties[idx0].displayName = local_pProperties[idx0].displayName;
            pProperties[idx0].physicalDimensions = local_pProperties[idx0].physicalDimensions;
            pProperties[idx0].physicalResolution = local_pProperties[idx0].physicalResolution;
            pProperties[idx0].supportedTransforms = local_pProperties[idx0].supportedTransforms;
            pProperties[idx0].planeReorderPossible = local_pProperties[idx0].planeReorderPossible;
            pProperties[idx0].persistentContent = local_pProperties[idx0].persistentContent;
        }
    }
    delete[] local_pProperties;
    return result;
}

VkResult explicit_GetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t *pDisplayCount,
                                                      VkDisplayKHR *pDisplays) {
    layer_data *my_map_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
    VkResult result = get_dispatch_table(unique_objects_instance_table_map, physicalDevice)
                          ->GetDisplayPlaneSupportedDisplaysKHR(physicalDevice, planeIndex, pDisplayCount, pDisplays);
    if (VK_SUCCESS == result) {
        if ((*pDisplayCount > 0) && pDisplays) {
            std::lock_guard<std::mutex> lock(global_lock);
            for (uint32_t i = 0; i < *pDisplayCount; i++) {
                auto it = my_map_data->unique_id_mapping.find(reinterpret_cast<const uint64_t &>(pDisplays[i]));
                assert(it != my_map_data->unique_id_mapping.end());
                pDisplays[i] = reinterpret_cast<VkDisplayKHR &>(it->second);
            }
        }
    }
    return result;
}

VkResult explicit_GetDisplayModePropertiesKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t *pPropertyCount,
                                              VkDisplayModePropertiesKHR *pProperties) {
    layer_data *my_map_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
    safe_VkDisplayModePropertiesKHR *local_pProperties = NULL;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        display = (VkDisplayKHR)my_map_data->unique_id_mapping[reinterpret_cast<uint64_t &>(display)];
        if (pProperties) {
            local_pProperties = new safe_VkDisplayModePropertiesKHR[*pPropertyCount];
            for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
                local_pProperties[idx0].initialize(&pProperties[idx0]);
            }
        }
    }

    VkResult result = get_dispatch_table(unique_objects_instance_table_map, physicalDevice)
                          ->GetDisplayModePropertiesKHR(physicalDevice, display, pPropertyCount,
                                                        (VkDisplayModePropertiesKHR *)local_pProperties);
    if (result == VK_SUCCESS && pProperties) {
        for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
            std::lock_guard<std::mutex> lock(global_lock);

            uint64_t unique_id = global_unique_id++;
            my_map_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(local_pProperties[idx0].displayMode);
            pProperties[idx0].displayMode = reinterpret_cast<VkDisplayModeKHR &>(unique_id);
            pProperties[idx0].parameters.visibleRegion.width = local_pProperties[idx0].parameters.visibleRegion.width;
            pProperties[idx0].parameters.visibleRegion.height = local_pProperties[idx0].parameters.visibleRegion.height;
            pProperties[idx0].parameters.refreshRate = local_pProperties[idx0].parameters.refreshRate;
        }
    }
    delete[] local_pProperties;
    return result;
}
#endif // !__ANDROID__

} // namespace unique_objects