null_driver.cpp revision b147127b06c1ce6443839e8102d1ed2631a57b07
1/*
2 * Copyright 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include <hardware/hwvulkan.h>
18#include <vulkan/vk_ext_debug_report.h>
19
20#include <algorithm>
21#include <array>
22#include <inttypes.h>
23#include <string.h>
24
25#include <log/log.h>
26#include <utils/Errors.h>
27
28#include "null_driver_gen.h"
29
30using namespace null_driver;
31
// Dispatchable physical-device object. The loader's contract requires the
// hwvulkan dispatch magic to be the first field of every dispatchable handle.
struct VkPhysicalDevice_T {
    hwvulkan_dispatch_t dispatch;
};
35
struct VkInstance_T {
    hwvulkan_dispatch_t dispatch;     // must be first (loader contract)
    VkAllocationCallbacks allocator;  // callbacks captured at vkCreateInstance
    // The single physical device is embedded so that
    // GetInstanceFromPhysicalDevice can recover the instance via offsetof.
    VkPhysicalDevice_T physical_device;
    // Counter backing AllocHandle() for instance-level non-dispatchable
    // handles (debug report callbacks).
    uint64_t next_callback_handle;
};
42
// Dispatchable queue object; dispatch magic first, no other state needed.
struct VkQueue_T {
    hwvulkan_dispatch_t dispatch;
};
46
// Dispatchable command buffer object; dispatch magic first, no other state.
struct VkCommandBuffer_T {
    hwvulkan_dispatch_t dispatch;
};
50
namespace {
// Handles for non-dispatchable objects are either pointers, or arbitrary
// 64-bit non-zero values. We only use pointers when we need to keep state for
// the object even in a null driver. For the rest, we form a handle as:
//   [63:63] = 1 to distinguish from pointer handles*
//   [62:56] = non-zero handle type enum value
//   [55: 0] = per-handle-type incrementing counter
// * This works because virtual addresses with the high bit set are reserved
// for kernel data in all ABIs we run on.
//
// We never reclaim handles on vkDestroy*. It's not even necessary for us to
// have distinct handles for live objects, and practically speaking we won't
// ever create 2^56 objects of the same type from a single VkDevice in a null
// driver.
//
// Using a namespace here instead of 'enum class' since we want scoped
// constants but also want implicit conversions to integral types.
namespace HandleType {
enum Enum {
    kBufferView,
    kDebugReportCallbackEXT,
    kDescriptorPool,
    kDescriptorSet,
    kDescriptorSetLayout,
    kEvent,
    kFence,
    kFramebuffer,
    kImageView,
    kPipeline,
    kPipelineCache,
    kPipelineLayout,
    kQueryPool,
    kRenderPass,
    kSampler,
    kSemaphore,
    kShaderModule,

    kNumTypes
};
}  // namespace HandleType

// Advertise at most half the address space as device memory so that sizes
// always fit in a pointer-sized signed integer.
const VkDeviceSize kMaxDeviceMemory = VkDeviceSize(INTPTR_MAX) + 1;

}  // anonymous namespace
95
struct VkDevice_T {
    hwvulkan_dispatch_t dispatch;     // must be first (loader contract)
    VkAllocationCallbacks allocator;  // callbacks captured at vkCreateDevice
    VkInstance_T* instance;           // owning instance (not owned here)
    VkQueue_T queue;                  // the device's single queue, embedded
    // Per-handle-type counters backing AllocHandle() for device-level
    // non-dispatchable handles.
    std::array<uint64_t, HandleType::kNumTypes> next_handle;
};
103
104// -----------------------------------------------------------------------------
105// Declare HAL_MODULE_INFO_SYM early so it can be referenced by nulldrv_device
106// later.
107
namespace {
// Forward declaration so the module-methods table can reference OpenDevice;
// the definition follows HAL_MODULE_INFO_SYM below.
int OpenDevice(const hw_module_t* module, const char* id, hw_device_t** device);
hw_module_methods_t nulldrv_module_methods = {.open = OpenDevice};
}  // namespace
112
// HAL module descriptor. The Android HAL loader finds this symbol by name
// (HAL_MODULE_INFO_SYM) via dlsym, hence the default visibility attribute.
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wmissing-variable-declarations"
__attribute__((visibility("default"))) hwvulkan_module_t HAL_MODULE_INFO_SYM = {
    .common =
        {
            .tag = HARDWARE_MODULE_TAG,
            .module_api_version = HWVULKAN_MODULE_API_VERSION_0_1,
            .hal_api_version = HARDWARE_HAL_API_VERSION,
            .id = HWVULKAN_HARDWARE_MODULE_ID,
            .name = "Null Vulkan Driver",
            .author = "The Android Open Source Project",
            .methods = &nulldrv_module_methods,
        },
};
#pragma clang diagnostic pop
128
129// -----------------------------------------------------------------------------
130
131namespace {
132
// hw_device_t::close hook. Opening the device allocates nothing (see
// OpenDevice, which hands out a static object), so there is nothing to
// release here.
int CloseDevice(struct hw_device_t* /*device*/) {
    return 0;
}
137
// The single, statically-allocated hwvulkan device exposed by this module.
// Only the three global entrypoints are populated; everything else is
// reached through GetInstanceProcAddr.
hwvulkan_device_t nulldrv_device = {
    .common =
        {
            .tag = HARDWARE_DEVICE_TAG,
            .version = HWVULKAN_DEVICE_API_VERSION_0_1,
            .module = &HAL_MODULE_INFO_SYM.common,
            .close = CloseDevice,
        },
    .EnumerateInstanceExtensionProperties =
        EnumerateInstanceExtensionProperties,
    .CreateInstance = CreateInstance,
    .GetInstanceProcAddr = GetInstanceProcAddr};
150
151int OpenDevice(const hw_module_t* /*module*/,
152               const char* id,
153               hw_device_t** device) {
154    if (strcmp(id, HWVULKAN_DEVICE_0) == 0) {
155        *device = &nulldrv_device.common;
156        return 0;
157    }
158    return -ENOENT;
159}
160
// Recover the owning VkInstance_T from a physical device pointer. Valid
// because the (single) VkPhysicalDevice_T is embedded in VkInstance_T, so
// subtracting its offset yields the enclosing instance.
VkInstance_T* GetInstanceFromPhysicalDevice(
    VkPhysicalDevice_T* physical_device) {
    return reinterpret_cast<VkInstance_T*>(
        reinterpret_cast<uintptr_t>(physical_device) -
        offsetof(VkInstance_T, physical_device));
}
167
168uint64_t AllocHandle(uint64_t type, uint64_t* next_handle) {
169    const uint64_t kHandleMask = (UINT64_C(1) << 56) - 1;
170    ALOGE_IF(*next_handle == kHandleMask,
171             "non-dispatchable handles of type=%" PRIu64
172             " are about to overflow",
173             type);
174    return (UINT64_C(1) << 63) | ((type & 0x7) << 56) |
175           ((*next_handle)++ & kHandleMask);
176}
177
178template <class Handle>
179Handle AllocHandle(VkInstance instance, HandleType::Enum type) {
180    return reinterpret_cast<Handle>(
181        AllocHandle(type, &instance->next_callback_handle));
182}
183
184template <class Handle>
185Handle AllocHandle(VkDevice device, HandleType::Enum type) {
186    return reinterpret_cast<Handle>(
187        AllocHandle(type, &device->next_handle[type]));
188}
189
190}  // namespace
191
192namespace null_driver {
193
// For stateful object type T, defines Get<T>FromHandle / GetHandleTo<T>
// converting between the opaque Vk handle and the driver-internal pointer.
// These handles are plain pointers (high bit clear), unlike the tagged
// counters produced by AllocHandle.
#define DEFINE_OBJECT_HANDLE_CONVERSION(T)              \
    T* Get##T##FromHandle(Vk##T h);                     \
    T* Get##T##FromHandle(Vk##T h) {                    \
        return reinterpret_cast<T*>(uintptr_t(h));      \
    }                                                   \
    Vk##T GetHandleTo##T(const T* obj);                 \
    Vk##T GetHandleTo##T(const T* obj) {                \
        return Vk##T(reinterpret_cast<uintptr_t>(obj)); \
    }
203
204// -----------------------------------------------------------------------------
205// Global
206
207VKAPI_ATTR
208VkResult EnumerateInstanceExtensionProperties(
209    const char* layer_name,
210    uint32_t* count,
211    VkExtensionProperties* properties) {
212    if (layer_name) {
213        ALOGW(
214            "Driver vkEnumerateInstanceExtensionProperties shouldn't be called "
215            "with a layer name ('%s')",
216            layer_name);
217        *count = 0;
218        return VK_SUCCESS;
219    }
220
221    const VkExtensionProperties kExtensions[] = {
222        {VK_EXT_DEBUG_REPORT_EXTENSION_NAME, VK_EXT_DEBUG_REPORT_SPEC_VERSION}};
223    const uint32_t kExtensionsCount =
224        sizeof(kExtensions) / sizeof(kExtensions[0]);
225
226    if (!properties || *count > kExtensionsCount)
227        *count = kExtensionsCount;
228    if (properties)
229        std::copy(kExtensions, kExtensions + *count, properties);
230    return *count < kExtensionsCount ? VK_INCOMPLETE : VK_SUCCESS;
231}
232
233VKAPI_ATTR
234VkResult CreateInstance(const VkInstanceCreateInfo* create_info,
235                        const VkAllocationCallbacks* allocator,
236                        VkInstance* out_instance) {
237    // Assume the loader provided alloc callbacks even if the app didn't.
238    ALOG_ASSERT(
239        allocator,
240        "Missing alloc callbacks, loader or app should have provided them");
241
242    VkInstance_T* instance =
243        static_cast<VkInstance_T*>(allocator->pfnAllocation(
244            allocator->pUserData, sizeof(VkInstance_T), alignof(VkInstance_T),
245            VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE));
246    if (!instance)
247        return VK_ERROR_OUT_OF_HOST_MEMORY;
248
249    instance->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
250    instance->allocator = *allocator;
251    instance->physical_device.dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
252    instance->next_callback_handle = 0;
253
254    for (uint32_t i = 0; i < create_info->enabledExtensionCount; i++) {
255        if (strcmp(create_info->ppEnabledExtensionNames[i],
256                   VK_EXT_DEBUG_REPORT_EXTENSION_NAME) == 0) {
257            ALOGV("instance extension '%s' requested",
258                  create_info->ppEnabledExtensionNames[i]);
259        } else {
260            ALOGW("unsupported extension '%s' requested",
261                  create_info->ppEnabledExtensionNames[i]);
262        }
263    }
264
265    *out_instance = instance;
266    return VK_SUCCESS;
267}
268
269VKAPI_ATTR
270PFN_vkVoidFunction GetInstanceProcAddr(VkInstance instance, const char* name) {
271    return instance ? GetInstanceProcAddr(name) : GetGlobalProcAddr(name);
272}
273
274VKAPI_ATTR
275PFN_vkVoidFunction GetDeviceProcAddr(VkDevice, const char* name) {
276    return GetInstanceProcAddr(name);
277}
278
279// -----------------------------------------------------------------------------
280// Instance
281
282void DestroyInstance(VkInstance instance,
283                     const VkAllocationCallbacks* /*allocator*/) {
284    instance->allocator.pfnFree(instance->allocator.pUserData, instance);
285}
286
287// -----------------------------------------------------------------------------
288// PhysicalDevice
289
290VkResult EnumeratePhysicalDevices(VkInstance instance,
291                                  uint32_t* physical_device_count,
292                                  VkPhysicalDevice* physical_devices) {
293    if (physical_devices && *physical_device_count >= 1)
294        physical_devices[0] = &instance->physical_device;
295    *physical_device_count = 1;
296    return VK_SUCCESS;
297}
298
299VkResult EnumerateDeviceLayerProperties(VkPhysicalDevice /*gpu*/,
300                                        uint32_t* count,
301                                        VkLayerProperties* /*properties*/) {
302    ALOGW("Driver vkEnumerateDeviceLayerProperties shouldn't be called");
303    *count = 0;
304    return VK_SUCCESS;
305}
306
307VkResult EnumerateDeviceExtensionProperties(VkPhysicalDevice /*gpu*/,
308                                            const char* layer_name,
309                                            uint32_t* count,
310                                            VkExtensionProperties* properties) {
311    if (layer_name) {
312        ALOGW(
313            "Driver vkEnumerateDeviceExtensionProperties shouldn't be called "
314            "with a layer name ('%s')",
315            layer_name);
316        *count = 0;
317        return VK_SUCCESS;
318    }
319
320    const VkExtensionProperties kExtensions[] = {
321        {VK_ANDROID_NATIVE_BUFFER_EXTENSION_NAME,
322         VK_ANDROID_NATIVE_BUFFER_SPEC_VERSION}};
323    const uint32_t kExtensionsCount =
324        sizeof(kExtensions) / sizeof(kExtensions[0]);
325
326    if (!properties || *count > kExtensionsCount)
327        *count = kExtensionsCount;
328    if (properties)
329        std::copy(kExtensions, kExtensions + *count, properties);
330    return *count < kExtensionsCount ? VK_INCOMPLETE : VK_SUCCESS;
331}
332
333void GetPhysicalDeviceProperties(VkPhysicalDevice,
334                                 VkPhysicalDeviceProperties* properties) {
335    properties->apiVersion = VK_API_VERSION;
336    properties->driverVersion = VK_MAKE_VERSION(0, 0, 1);
337    properties->vendorID = 0;
338    properties->deviceID = 0;
339    properties->deviceType = VK_PHYSICAL_DEVICE_TYPE_OTHER;
340    strcpy(properties->deviceName, "Android Vulkan Null Driver");
341    memset(properties->pipelineCacheUUID, 0,
342           sizeof(properties->pipelineCacheUUID));
343}
344
345void GetPhysicalDeviceQueueFamilyProperties(
346    VkPhysicalDevice,
347    uint32_t* count,
348    VkQueueFamilyProperties* properties) {
349    if (!properties || *count > 1)
350        *count = 1;
351    if (properties && *count == 1) {
352        properties->queueFlags = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT |
353                                 VK_QUEUE_TRANSFER_BIT;
354        properties->queueCount = 1;
355        properties->timestampValidBits = 64;
356        properties->minImageTransferGranularity = VkExtent3D{1, 1, 1};
357    }
358}
359
360void GetPhysicalDeviceMemoryProperties(
361    VkPhysicalDevice,
362    VkPhysicalDeviceMemoryProperties* properties) {
363    properties->memoryTypeCount = 1;
364    properties->memoryTypes[0].propertyFlags =
365        VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT |
366        VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
367        VK_MEMORY_PROPERTY_HOST_COHERENT_BIT |
368        VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
369    properties->memoryTypes[0].heapIndex = 0;
370    properties->memoryHeapCount = 1;
371    properties->memoryHeaps[0].size = kMaxDeviceMemory;
372    properties->memoryHeaps[0].flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT;
373}
374
375// -----------------------------------------------------------------------------
376// Device
377
378VkResult CreateDevice(VkPhysicalDevice physical_device,
379                      const VkDeviceCreateInfo* create_info,
380                      const VkAllocationCallbacks* allocator,
381                      VkDevice* out_device) {
382    VkInstance_T* instance = GetInstanceFromPhysicalDevice(physical_device);
383    if (!allocator)
384        allocator = &instance->allocator;
385    VkDevice_T* device = static_cast<VkDevice_T*>(allocator->pfnAllocation(
386        allocator->pUserData, sizeof(VkDevice_T), alignof(VkDevice_T),
387        VK_SYSTEM_ALLOCATION_SCOPE_DEVICE));
388    if (!device)
389        return VK_ERROR_OUT_OF_HOST_MEMORY;
390
391    device->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
392    device->allocator = *allocator;
393    device->instance = instance;
394    device->queue.dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
395    std::fill(device->next_handle.begin(), device->next_handle.end(),
396              UINT64_C(0));
397
398    for (uint32_t i = 0; i < create_info->enabledExtensionCount; i++) {
399        if (strcmp(create_info->ppEnabledExtensionNames[i],
400                   VK_ANDROID_NATIVE_BUFFER_EXTENSION_NAME) == 0) {
401            ALOGV("Enabling " VK_ANDROID_NATIVE_BUFFER_EXTENSION_NAME);
402        }
403    }
404
405    *out_device = device;
406    return VK_SUCCESS;
407}
408
409void DestroyDevice(VkDevice device,
410                   const VkAllocationCallbacks* /*allocator*/) {
411    if (!device)
412        return;
413    device->allocator.pfnFree(device->allocator.pUserData, device);
414}
415
416void GetDeviceQueue(VkDevice device, uint32_t, uint32_t, VkQueue* queue) {
417    *queue = &device->queue;
418}
419
420// -----------------------------------------------------------------------------
421// CommandPool
422
// Stateful pool object: remembers the allocation callbacks so command
// buffers (and the pool itself) can later be freed with matching callbacks.
struct CommandPool {
    typedef VkCommandPool HandleType;
    VkAllocationCallbacks allocator;
};
DEFINE_OBJECT_HANDLE_CONVERSION(CommandPool)
428
429VkResult CreateCommandPool(VkDevice device,
430                           const VkCommandPoolCreateInfo* /*create_info*/,
431                           const VkAllocationCallbacks* allocator,
432                           VkCommandPool* cmd_pool) {
433    if (!allocator)
434        allocator = &device->allocator;
435    CommandPool* pool = static_cast<CommandPool*>(allocator->pfnAllocation(
436        allocator->pUserData, sizeof(CommandPool), alignof(CommandPool),
437        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
438    if (!pool)
439        return VK_ERROR_OUT_OF_HOST_MEMORY;
440    pool->allocator = *allocator;
441    *cmd_pool = GetHandleToCommandPool(pool);
442    return VK_SUCCESS;
443}
444
445void DestroyCommandPool(VkDevice /*device*/,
446                        VkCommandPool cmd_pool,
447                        const VkAllocationCallbacks* /*allocator*/) {
448    CommandPool* pool = GetCommandPoolFromHandle(cmd_pool);
449    pool->allocator.pfnFree(pool->allocator.pUserData, pool);
450}
451
452// -----------------------------------------------------------------------------
453// CmdBuffer
454
// Allocate commandBufferCount command buffers from the pool's allocator.
// All-or-nothing: on allocation failure, everything allocated so far is
// freed and an error is returned. The output array is pre-filled with null
// so the cleanup loop can stop at the first unallocated slot.
VkResult AllocateCommandBuffers(VkDevice /*device*/,
                                const VkCommandBufferAllocateInfo* alloc_info,
                                VkCommandBuffer* cmdbufs) {
    VkResult result = VK_SUCCESS;
    CommandPool& pool = *GetCommandPoolFromHandle(alloc_info->commandPool);
    std::fill(cmdbufs, cmdbufs + alloc_info->commandBufferCount, nullptr);
    for (uint32_t i = 0; i < alloc_info->commandBufferCount; i++) {
        cmdbufs[i] =
            static_cast<VkCommandBuffer_T*>(pool.allocator.pfnAllocation(
                pool.allocator.pUserData, sizeof(VkCommandBuffer_T),
                alignof(VkCommandBuffer_T), VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
        if (!cmdbufs[i]) {
            result = VK_ERROR_OUT_OF_HOST_MEMORY;
            break;
        }
        cmdbufs[i]->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    }
    if (result != VK_SUCCESS) {
        // Unwind: entries are allocated in order, so the first null marks
        // where allocation stopped.
        for (uint32_t i = 0; i < alloc_info->commandBufferCount; i++) {
            if (!cmdbufs[i])
                break;
            pool.allocator.pfnFree(pool.allocator.pUserData, cmdbufs[i]);
        }
    }
    return result;
}
481
482void FreeCommandBuffers(VkDevice /*device*/,
483                        VkCommandPool cmd_pool,
484                        uint32_t count,
485                        const VkCommandBuffer* cmdbufs) {
486    CommandPool& pool = *GetCommandPoolFromHandle(cmd_pool);
487    for (uint32_t i = 0; i < count; i++)
488        pool.allocator.pfnFree(pool.allocator.pUserData, cmdbufs[i]);
489}
490
491// -----------------------------------------------------------------------------
492// DeviceMemory
493
// Header for a host allocation backing a VkDeviceMemory. The payload
// follows immediately after via the (zero-length array) flexible-member
// idiom; 16-byte alignment allows fast Neon/SSE copies into the payload.
struct DeviceMemory {
    typedef VkDeviceMemory HandleType;
    VkDeviceSize size;  // total allocation size including this header
    alignas(16) uint8_t data[0];
};
DEFINE_OBJECT_HANDLE_CONVERSION(DeviceMemory)
500
501VkResult AllocateMemory(VkDevice device,
502                        const VkMemoryAllocateInfo* alloc_info,
503                        const VkAllocationCallbacks* allocator,
504                        VkDeviceMemory* mem_handle) {
505    if (SIZE_MAX - sizeof(DeviceMemory) <= alloc_info->allocationSize)
506        return VK_ERROR_OUT_OF_HOST_MEMORY;
507    if (!allocator)
508        allocator = &device->allocator;
509
510    size_t size = sizeof(DeviceMemory) + size_t(alloc_info->allocationSize);
511    DeviceMemory* mem = static_cast<DeviceMemory*>(allocator->pfnAllocation(
512        allocator->pUserData, size, alignof(DeviceMemory),
513        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
514    if (!mem)
515        return VK_ERROR_OUT_OF_HOST_MEMORY;
516    mem->size = size;
517    *mem_handle = GetHandleToDeviceMemory(mem);
518    return VK_SUCCESS;
519}
520
521void FreeMemory(VkDevice device,
522                VkDeviceMemory mem_handle,
523                const VkAllocationCallbacks* allocator) {
524    if (!allocator)
525        allocator = &device->allocator;
526    DeviceMemory* mem = GetDeviceMemoryFromHandle(mem_handle);
527    allocator->pfnFree(allocator->pUserData, mem);
528}
529
530VkResult MapMemory(VkDevice,
531                   VkDeviceMemory mem_handle,
532                   VkDeviceSize offset,
533                   VkDeviceSize,
534                   VkMemoryMapFlags,
535                   void** out_ptr) {
536    DeviceMemory* mem = GetDeviceMemoryFromHandle(mem_handle);
537    *out_ptr = &mem->data[0] + offset;
538    return VK_SUCCESS;
539}
540
541// -----------------------------------------------------------------------------
542// Buffer
543
// Stateful buffer object: only the size is retained, for
// vkGetBufferMemoryRequirements.
struct Buffer {
    typedef VkBuffer HandleType;
    VkDeviceSize size;
};
DEFINE_OBJECT_HANDLE_CONVERSION(Buffer)
549
550VkResult CreateBuffer(VkDevice device,
551                      const VkBufferCreateInfo* create_info,
552                      const VkAllocationCallbacks* allocator,
553                      VkBuffer* buffer_handle) {
554    ALOGW_IF(create_info->size > kMaxDeviceMemory,
555             "CreateBuffer: requested size 0x%" PRIx64
556             " exceeds max device memory size 0x%" PRIx64,
557             create_info->size, kMaxDeviceMemory);
558    if (!allocator)
559        allocator = &device->allocator;
560    Buffer* buffer = static_cast<Buffer*>(allocator->pfnAllocation(
561        allocator->pUserData, sizeof(Buffer), alignof(Buffer),
562        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
563    if (!buffer)
564        return VK_ERROR_OUT_OF_HOST_MEMORY;
565    buffer->size = create_info->size;
566    *buffer_handle = GetHandleToBuffer(buffer);
567    return VK_SUCCESS;
568}
569
570void GetBufferMemoryRequirements(VkDevice,
571                                 VkBuffer buffer_handle,
572                                 VkMemoryRequirements* requirements) {
573    Buffer* buffer = GetBufferFromHandle(buffer_handle);
574    requirements->size = buffer->size;
575    requirements->alignment = 16;  // allow fast Neon/SSE memcpy
576    requirements->memoryTypeBits = 0x1;
577}
578
579void DestroyBuffer(VkDevice device,
580                   VkBuffer buffer_handle,
581                   const VkAllocationCallbacks* allocator) {
582    if (!allocator)
583        allocator = &device->allocator;
584    Buffer* buffer = GetBufferFromHandle(buffer_handle);
585    allocator->pfnFree(allocator->pUserData, buffer);
586}
587
588// -----------------------------------------------------------------------------
589// Image
590
// Stateful image object: only the computed byte size is retained, for
// vkGetImageMemoryRequirements.
struct Image {
    typedef VkImage HandleType;
    VkDeviceSize size;
};
DEFINE_OBJECT_HANDLE_CONVERSION(Image)
596
597VkResult CreateImage(VkDevice device,
598                     const VkImageCreateInfo* create_info,
599                     const VkAllocationCallbacks* allocator,
600                     VkImage* image_handle) {
601    if (create_info->imageType != VK_IMAGE_TYPE_2D ||
602        create_info->format != VK_FORMAT_R8G8B8A8_UNORM ||
603        create_info->mipLevels != 1) {
604        ALOGE("CreateImage: not yet implemented: type=%d format=%d mips=%u",
605              create_info->imageType, create_info->format,
606              create_info->mipLevels);
607        return VK_ERROR_OUT_OF_HOST_MEMORY;
608    }
609
610    VkDeviceSize size =
611        VkDeviceSize(create_info->extent.width * create_info->extent.height) *
612        create_info->arrayLayers * create_info->samples * 4u;
613    ALOGW_IF(size > kMaxDeviceMemory,
614             "CreateImage: image size 0x%" PRIx64
615             " exceeds max device memory size 0x%" PRIx64,
616             size, kMaxDeviceMemory);
617
618    if (!allocator)
619        allocator = &device->allocator;
620    Image* image = static_cast<Image*>(allocator->pfnAllocation(
621        allocator->pUserData, sizeof(Image), alignof(Image),
622        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
623    if (!image)
624        return VK_ERROR_OUT_OF_HOST_MEMORY;
625    image->size = size;
626    *image_handle = GetHandleToImage(image);
627    return VK_SUCCESS;
628}
629
630void GetImageMemoryRequirements(VkDevice,
631                                VkImage image_handle,
632                                VkMemoryRequirements* requirements) {
633    Image* image = GetImageFromHandle(image_handle);
634    requirements->size = image->size;
635    requirements->alignment = 16;  // allow fast Neon/SSE memcpy
636    requirements->memoryTypeBits = 0x1;
637}
638
639void DestroyImage(VkDevice device,
640                  VkImage image_handle,
641                  const VkAllocationCallbacks* allocator) {
642    if (!allocator)
643        allocator = &device->allocator;
644    Image* image = GetImageFromHandle(image_handle);
645    allocator->pfnFree(allocator->pUserData, image);
646}
647
648VkResult GetSwapchainGrallocUsageANDROID(VkDevice,
649                                         VkFormat,
650                                         VkImageUsageFlags,
651                                         int* grallocUsage) {
652    // The null driver never reads or writes the gralloc buffer
653    *grallocUsage = 0;
654    return VK_SUCCESS;
655}
656
657VkResult AcquireImageANDROID(VkDevice,
658                             VkImage,
659                             int fence,
660                             VkSemaphore,
661                             VkFence) {
662    close(fence);
663    return VK_SUCCESS;
664}
665
666VkResult QueueSignalReleaseImageANDROID(VkQueue,
667                                        uint32_t,
668                                        const VkSemaphore*,
669                                        VkImage,
670                                        int* fence) {
671    *fence = -1;
672    return VK_SUCCESS;
673}
674
675// -----------------------------------------------------------------------------
676// No-op types
677
678VkResult CreateBufferView(VkDevice device,
679                          const VkBufferViewCreateInfo*,
680                          const VkAllocationCallbacks* /*allocator*/,
681                          VkBufferView* view) {
682    *view = AllocHandle<VkBufferView>(device, HandleType::kBufferView);
683    return VK_SUCCESS;
684}
685
686VkResult CreateDescriptorPool(VkDevice device,
687                              const VkDescriptorPoolCreateInfo*,
688                              const VkAllocationCallbacks* /*allocator*/,
689                              VkDescriptorPool* pool) {
690    *pool = AllocHandle<VkDescriptorPool>(device, HandleType::kDescriptorPool);
691    return VK_SUCCESS;
692}
693
694VkResult AllocateDescriptorSets(VkDevice device,
695                                const VkDescriptorSetAllocateInfo* alloc_info,
696                                VkDescriptorSet* descriptor_sets) {
697    for (uint32_t i = 0; i < alloc_info->descriptorSetCount; i++)
698        descriptor_sets[i] =
699            AllocHandle<VkDescriptorSet>(device, HandleType::kDescriptorSet);
700    return VK_SUCCESS;
701}
702
703VkResult CreateDescriptorSetLayout(VkDevice device,
704                                   const VkDescriptorSetLayoutCreateInfo*,
705                                   const VkAllocationCallbacks* /*allocator*/,
706                                   VkDescriptorSetLayout* layout) {
707    *layout = AllocHandle<VkDescriptorSetLayout>(
708        device, HandleType::kDescriptorSetLayout);
709    return VK_SUCCESS;
710}
711
712VkResult CreateEvent(VkDevice device,
713                     const VkEventCreateInfo*,
714                     const VkAllocationCallbacks* /*allocator*/,
715                     VkEvent* event) {
716    *event = AllocHandle<VkEvent>(device, HandleType::kEvent);
717    return VK_SUCCESS;
718}
719
720VkResult CreateFence(VkDevice device,
721                     const VkFenceCreateInfo*,
722                     const VkAllocationCallbacks* /*allocator*/,
723                     VkFence* fence) {
724    *fence = AllocHandle<VkFence>(device, HandleType::kFence);
725    return VK_SUCCESS;
726}
727
728VkResult CreateFramebuffer(VkDevice device,
729                           const VkFramebufferCreateInfo*,
730                           const VkAllocationCallbacks* /*allocator*/,
731                           VkFramebuffer* framebuffer) {
732    *framebuffer = AllocHandle<VkFramebuffer>(device, HandleType::kFramebuffer);
733    return VK_SUCCESS;
734}
735
736VkResult CreateImageView(VkDevice device,
737                         const VkImageViewCreateInfo*,
738                         const VkAllocationCallbacks* /*allocator*/,
739                         VkImageView* view) {
740    *view = AllocHandle<VkImageView>(device, HandleType::kImageView);
741    return VK_SUCCESS;
742}
743
744VkResult CreateGraphicsPipelines(VkDevice device,
745                                 VkPipelineCache,
746                                 uint32_t count,
747                                 const VkGraphicsPipelineCreateInfo*,
748                                 const VkAllocationCallbacks* /*allocator*/,
749                                 VkPipeline* pipelines) {
750    for (uint32_t i = 0; i < count; i++)
751        pipelines[i] = AllocHandle<VkPipeline>(device, HandleType::kPipeline);
752    return VK_SUCCESS;
753}
754
755VkResult CreateComputePipelines(VkDevice device,
756                                VkPipelineCache,
757                                uint32_t count,
758                                const VkComputePipelineCreateInfo*,
759                                const VkAllocationCallbacks* /*allocator*/,
760                                VkPipeline* pipelines) {
761    for (uint32_t i = 0; i < count; i++)
762        pipelines[i] = AllocHandle<VkPipeline>(device, HandleType::kPipeline);
763    return VK_SUCCESS;
764}
765
766VkResult CreatePipelineCache(VkDevice device,
767                             const VkPipelineCacheCreateInfo*,
768                             const VkAllocationCallbacks* /*allocator*/,
769                             VkPipelineCache* cache) {
770    *cache = AllocHandle<VkPipelineCache>(device, HandleType::kPipelineCache);
771    return VK_SUCCESS;
772}
773
774VkResult CreatePipelineLayout(VkDevice device,
775                              const VkPipelineLayoutCreateInfo*,
776                              const VkAllocationCallbacks* /*allocator*/,
777                              VkPipelineLayout* layout) {
778    *layout =
779        AllocHandle<VkPipelineLayout>(device, HandleType::kPipelineLayout);
780    return VK_SUCCESS;
781}
782
783VkResult CreateQueryPool(VkDevice device,
784                         const VkQueryPoolCreateInfo*,
785                         const VkAllocationCallbacks* /*allocator*/,
786                         VkQueryPool* pool) {
787    *pool = AllocHandle<VkQueryPool>(device, HandleType::kQueryPool);
788    return VK_SUCCESS;
789}
790
791VkResult CreateRenderPass(VkDevice device,
792                          const VkRenderPassCreateInfo*,
793                          const VkAllocationCallbacks* /*allocator*/,
794                          VkRenderPass* renderpass) {
795    *renderpass = AllocHandle<VkRenderPass>(device, HandleType::kRenderPass);
796    return VK_SUCCESS;
797}
798
799VkResult CreateSampler(VkDevice device,
800                       const VkSamplerCreateInfo*,
801                       const VkAllocationCallbacks* /*allocator*/,
802                       VkSampler* sampler) {
803    *sampler = AllocHandle<VkSampler>(device, HandleType::kSampler);
804    return VK_SUCCESS;
805}
806
807VkResult CreateSemaphore(VkDevice device,
808                         const VkSemaphoreCreateInfo*,
809                         const VkAllocationCallbacks* /*allocator*/,
810                         VkSemaphore* semaphore) {
811    *semaphore = AllocHandle<VkSemaphore>(device, HandleType::kSemaphore);
812    return VK_SUCCESS;
813}
814
815VkResult CreateShaderModule(VkDevice device,
816                            const VkShaderModuleCreateInfo*,
817                            const VkAllocationCallbacks* /*allocator*/,
818                            VkShaderModule* module) {
819    *module = AllocHandle<VkShaderModule>(device, HandleType::kShaderModule);
820    return VK_SUCCESS;
821}
822
823VkResult CreateDebugReportCallbackEXT(VkInstance instance,
824                                      const VkDebugReportCallbackCreateInfoEXT*,
825                                      const VkAllocationCallbacks*,
826                                      VkDebugReportCallbackEXT* callback) {
827    *callback = AllocHandle<VkDebugReportCallbackEXT>(
828        instance, HandleType::kDebugReportCallbackEXT);
829    return VK_SUCCESS;
830}
831
832// -----------------------------------------------------------------------------
833// No-op entrypoints
834
835// clang-format off
836#pragma clang diagnostic push
837#pragma clang diagnostic ignored "-Wunused-parameter"
838
// Physical-device query stubs: each logs a TODO (verbose-only) and returns
// without writing its output structure. NOTE(review): callers therefore see
// whatever was in the output struct before the call — these outputs are not
// zeroed here; confirm callers tolerate that.
void GetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

void GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

// Reports success but does not fill *pImageFormatProperties.
VkResult GetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

// Neither *pCount nor pProperties is written; presumably intended to report
// zero layers — TODO confirm and set *pCount = 0 explicitly.
VkResult EnumerateInstanceLayerProperties(uint32_t* pCount, VkLayerProperties* pProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}
856
// Submission is a no-op: nothing executes, so completing "immediately" is
// trivially correct. No TODO log here, unlike the stubs around it —
// presumably because this runs on every frame; confirm that intent.
VkResult QueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmitInfo, VkFence fence) {
    return VK_SUCCESS;
}

// With no real work ever queued, the queue/device are always idle.
VkResult QueueWaitIdle(VkQueue queue) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult DeviceWaitIdle(VkDevice device) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}
870
// Memory, binding, and sparse-resource stubs. Unmap/flush/invalidate/bind
// succeed without doing anything; the remaining entry points only log a
// TODO and leave their outputs untouched.
void UnmapMemory(VkDevice device, VkDeviceMemory mem) {
}

VkResult FlushMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult InvalidateMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

// NOTE(review): *pCommittedMemoryInBytes is never written.
void GetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

// Bind calls succeed silently (no log): nothing needs to be recorded for a
// binding the driver will never dereference.
VkResult BindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memOffset) {
    return VK_SUCCESS;
}

VkResult BindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem, VkDeviceSize memOffset) {
    return VK_SUCCESS;
}

// NOTE(review): *pNumRequirements is never written; presumably should be
// set to 0 to report "no sparse requirements" — confirm.
void GetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t* pNumRequirements, VkSparseImageMemoryRequirements* pSparseMemoryRequirements) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

void GetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pNumProperties, VkSparseImageFormatProperties* pProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult QueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}
908
// Fence/semaphore/event teardown and fence queries. Destroy* is empty
// because handles are never reclaimed (see the handle-scheme comment at the
// top of the file). Fence waits/queries report "signaled" immediately,
// which is consistent with QueueSubmit completing all work instantly.
void DestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* allocator) {
}

VkResult ResetFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences) {
    return VK_SUCCESS;
}

VkResult GetFenceStatus(VkDevice device, VkFence fence) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult WaitForFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout) {
    return VK_SUCCESS;
}

void DestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* allocator) {
}

void DestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks* allocator) {
}
930
931VkResult GetEventStatus(VkDevice device, VkEvent event) {
932    ALOGV("TODO: vk%s", __FUNCTION__);
933    return VK_SUCCESS;
934}
935
// Event signaling and query-pool stubs: event state is never stored, so
// Set/Reset only log a TODO and succeed.
VkResult SetEvent(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult ResetEvent(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

// Handles are never reclaimed, so destruction is a no-op.
void DestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* allocator) {
}

// NOTE(review): returns success without writing any results into pData —
// callers requesting VK_QUERY_RESULT_WAIT_BIT get stale buffer contents.
VkResult GetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}
953
// Object destruction and pipeline-cache/descriptor stubs. Every Destroy*
// here is intentionally empty: the corresponding Create* only minted a
// counter handle, and handles are never reclaimed. The remaining functions
// log a TODO and succeed without touching their outputs.
void DestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* allocator) {
}

// NOTE(review): *pLayout is never written.
void GetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

void DestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* allocator) {
}

void DestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* allocator) {
}

void DestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* allocator) {
}

// NOTE(review): *pDataSize is never written; presumably should report a
// zero-byte cache — confirm.
VkResult GetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult MergePipelineCaches(VkDevice device, VkPipelineCache destCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* allocator) {
}

void DestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* allocator) {
}

void DestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* allocator) {
}

void DestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* allocator) {
}

void DestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* allocator) {
}

VkResult ResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void UpdateDescriptorSets(VkDevice device, uint32_t writeCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t copyCount, const VkCopyDescriptorSet* pDescriptorCopies) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult FreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count, const VkDescriptorSet* pDescriptorSets) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* allocator) {
}

void DestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* allocator) {
}

// NOTE(review): *pGranularity is never written.
void GetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}
1018
// Command pool/buffer lifecycle stubs. Begin/End succeed silently (they sit
// on the per-frame recording path); the reset entry points also log a TODO.
VkResult ResetCommandPool(VkDevice device, VkCommandPool cmdPool, VkCommandPoolResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult BeginCommandBuffer(VkCommandBuffer cmdBuffer, const VkCommandBufferBeginInfo* pBeginInfo) {
    return VK_SUCCESS;
}

VkResult EndCommandBuffer(VkCommandBuffer cmdBuffer) {
    return VK_SUCCESS;
}

VkResult ResetCommandBuffer(VkCommandBuffer cmdBuffer, VkCommandBufferResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}
1036
// Command-recording no-ops: nothing is ever recorded into a command buffer,
// so every vkCmd* entry point has an intentionally empty body (and no TODO
// log, since these are called at high frequency during recording). The
// debug-report destroy/message stubs at the end are likewise empty.
void CmdBindPipeline(VkCommandBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) {
}

void CmdSetViewport(VkCommandBuffer cmdBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports) {
}

void CmdSetScissor(VkCommandBuffer cmdBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors) {
}

void CmdSetLineWidth(VkCommandBuffer cmdBuffer, float lineWidth) {
}

void CmdSetDepthBias(VkCommandBuffer cmdBuffer, float depthBias, float depthBiasClamp, float slopeScaledDepthBias) {
}

void CmdSetBlendConstants(VkCommandBuffer cmdBuffer, const float blendConst[4]) {
}

void CmdSetDepthBounds(VkCommandBuffer cmdBuffer, float minDepthBounds, float maxDepthBounds) {
}

void CmdSetStencilCompareMask(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilCompareMask) {
}

void CmdSetStencilWriteMask(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilWriteMask) {
}

void CmdSetStencilReference(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilReference) {
}

void CmdBindDescriptorSets(VkCommandBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) {
}

void CmdBindIndexBuffer(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType) {
}

void CmdBindVertexBuffers(VkCommandBuffer cmdBuffer, uint32_t startBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets) {
}

void CmdDraw(VkCommandBuffer cmdBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) {
}

void CmdDrawIndexed(VkCommandBuffer cmdBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
}

void CmdDrawIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
}

void CmdDrawIndexedIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
}

void CmdDispatch(VkCommandBuffer cmdBuffer, uint32_t x, uint32_t y, uint32_t z) {
}

void CmdDispatchIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset) {
}

void CmdCopyBuffer(VkCommandBuffer cmdBuffer, VkBuffer srcBuffer, VkBuffer destBuffer, uint32_t regionCount, const VkBufferCopy* pRegions) {
}

void CmdCopyImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageCopy* pRegions) {
}

void CmdBlitImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter) {
}

void CmdCopyBufferToImage(VkCommandBuffer cmdBuffer, VkBuffer srcBuffer, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
}

void CmdCopyImageToBuffer(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer destBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
}

void CmdUpdateBuffer(VkCommandBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize dataSize, const uint32_t* pData) {
}

void CmdFillBuffer(VkCommandBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize fillSize, uint32_t data) {
}

void CmdClearColorImage(VkCommandBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
}

void CmdClearDepthStencilImage(VkCommandBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
}

void CmdClearAttachments(VkCommandBuffer cmdBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects) {
}

void CmdResolveImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageResolve* pRegions) {
}

void CmdSetEvent(VkCommandBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
}

void CmdResetEvent(VkCommandBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
}

void CmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {
}

void CmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {
}

void CmdBeginQuery(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot, VkQueryControlFlags flags) {
}

void CmdEndQuery(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot) {
}

void CmdResetQueryPool(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount) {
}

void CmdWriteTimestamp(VkCommandBuffer cmdBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t slot) {
}

void CmdCopyQueryPoolResults(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize destStride, VkQueryResultFlags flags) {
}

void CmdPushConstants(VkCommandBuffer cmdBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t start, uint32_t length, const void* values) {
}

void CmdBeginRenderPass(VkCommandBuffer cmdBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents) {
}

void CmdNextSubpass(VkCommandBuffer cmdBuffer, VkSubpassContents contents) {
}

void CmdEndRenderPass(VkCommandBuffer cmdBuffer) {
}

void CmdExecuteCommands(VkCommandBuffer cmdBuffer, uint32_t cmdBuffersCount, const VkCommandBuffer* pCmdBuffers) {
}

// Callback handles are never reclaimed, so destruction is a no-op; the
// message entry point discards the report.
void DestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator) {
}

void DebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage) {
}
1174
1175#pragma clang diagnostic pop
1176// clang-format on
1177
1178}  // namespace null_driver
1179