null_driver.cpp revision 57f7f8c140c4b665f05c17866ebf201e9f4f46a4
1/*
2 * Copyright 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include <hardware/hwvulkan.h>
18#include <vulkan/vk_ext_debug_report.h>
19
20#include <algorithm>
21#include <array>
22#include <inttypes.h>
23#include <string.h>
24
25#include <log/log.h>
26#include <utils/Errors.h>
27
28#include "null_driver_gen.h"
29
30using namespace null_driver;
31
// Dispatchable physical-device object. Like the other dispatchable objects
// in this file, the hwvulkan dispatch table is the first (and here, only)
// member; its magic is set by CreateInstance.
struct VkPhysicalDevice_T {
    hwvulkan_dispatch_t dispatch;
};
35
// Dispatchable instance object. Member layout matters:
// GetInstanceFromPhysicalDevice() recovers the instance from the embedded
// physical_device via offsetof, so do not reorder fields casually.
struct VkInstance_T {
    hwvulkan_dispatch_t dispatch;        // dispatch magic, set in CreateInstance
    VkAllocationCallbacks allocator;     // callbacks captured at creation
    VkPhysicalDevice_T physical_device;  // the single physical device, by value
    uint64_t next_callback_handle;       // counter for instance-scoped handles
    bool debug_report_enabled;           // true if VK_EXT_debug_report enabled
};
43
// Dispatchable queue object; one lives embedded in each VkDevice_T.
struct VkQueue_T {
    hwvulkan_dispatch_t dispatch;
};
47
// Dispatchable command-buffer object; allocated from a CommandPool's
// allocator in AllocateCommandBuffers.
struct VkCommandBuffer_T {
    hwvulkan_dispatch_t dispatch;
};
51
52namespace {
53// Handles for non-dispatchable objects are either pointers, or arbitrary
54// 64-bit non-zero values. We only use pointers when we need to keep state for
55// the object even in a null driver. For the rest, we form a handle as:
56//   [63:63] = 1 to distinguish from pointer handles*
//   [62:56] = handle type enum value (note: the value may be zero — e.g.
//             kBufferView — so bit 63, not the type field, is what
//             distinguishes these from pointer handles)
58//   [55: 0] = per-handle-type incrementing counter
59// * This works because virtual addresses with the high bit set are reserved
60// for kernel data in all ABIs we run on.
61//
62// We never reclaim handles on vkDestroy*. It's not even necessary for us to
63// have distinct handles for live objects, and practically speaking we won't
64// ever create 2^56 objects of the same type from a single VkDevice in a null
65// driver.
66//
67// Using a namespace here instead of 'enum class' since we want scoped
68// constants but also want implicit conversions to integral types.
// Per-type tags packed into bits [62:56] of generated non-dispatchable
// handles (see AllocHandle); the value also indexes the per-type counter
// array VkDevice_T::next_handle.
namespace HandleType {
enum Enum {
    kBufferView,
    kDebugReportCallbackEXT,
    kDescriptorPool,
    kDescriptorSet,
    kDescriptorSetLayout,
    kEvent,
    kFence,
    kFramebuffer,
    kImageView,
    kPipeline,
    kPipelineCache,
    kPipelineLayout,
    kQueryPool,
    kRenderPass,
    kSampler,
    kSemaphore,
    kShaderModule,

    kNumTypes
};
}  // namespace HandleType
92
93const VkDeviceSize kMaxDeviceMemory = VkDeviceSize(INTPTR_MAX) + 1;
94
95}  // anonymous namespace
96
// Dispatchable device object.
struct VkDevice_T {
    hwvulkan_dispatch_t dispatch;     // dispatch magic, set in CreateDevice
    VkAllocationCallbacks allocator;  // callbacks captured at device creation
    VkInstance_T* instance;           // owning instance
    VkQueue_T queue;                  // the one queue GetDeviceQueue returns
    // One monotonically increasing counter per non-dispatchable handle type.
    std::array<uint64_t, HandleType::kNumTypes> next_handle;
};
104
105// -----------------------------------------------------------------------------
106// Declare HAL_MODULE_INFO_SYM early so it can be referenced by nulldrv_device
107// later.
108
namespace {
// Forward declaration: OpenDevice is defined below, after nulldrv_device,
// but HAL_MODULE_INFO_SYM needs these methods now.
int OpenDevice(const hw_module_t* module, const char* id, hw_device_t** device);
hw_module_methods_t nulldrv_module_methods = {.open = OpenDevice};
}  // namespace
113
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wmissing-variable-declarations"
// Exported HAL module descriptor — the well-known symbol the Android HAL
// loader resolves to find this driver.
__attribute__((visibility("default"))) hwvulkan_module_t HAL_MODULE_INFO_SYM = {
    .common =
        {
            .tag = HARDWARE_MODULE_TAG,
            .module_api_version = HWVULKAN_MODULE_API_VERSION_0_1,
            .hal_api_version = HARDWARE_HAL_API_VERSION,
            .id = HWVULKAN_HARDWARE_MODULE_ID,
            .name = "Null Vulkan Driver",
            .author = "The Android Open Source Project",
            .methods = &nulldrv_module_methods,
        },
};
#pragma clang diagnostic pop
129
130// -----------------------------------------------------------------------------
131
132namespace {
133
// hw_device_t::close hook. OpenDevice hands out a pointer to the static
// nulldrv_device singleton, so there is nothing to release here.
int CloseDevice(struct hw_device_t* /*device*/) {
    return 0;
}
138
// The single static hwvulkan device. Only the three global entry points are
// wired up here; everything else is reached through GetInstanceProcAddr.
hwvulkan_device_t nulldrv_device = {
    .common =
        {
            .tag = HARDWARE_DEVICE_TAG,
            .version = HWVULKAN_DEVICE_API_VERSION_0_1,
            .module = &HAL_MODULE_INFO_SYM.common,
            .close = CloseDevice,
        },
    .EnumerateInstanceExtensionProperties =
        EnumerateInstanceExtensionProperties,
    .CreateInstance = CreateInstance,
    .GetInstanceProcAddr = GetInstanceProcAddr};
151
152int OpenDevice(const hw_module_t* /*module*/,
153               const char* id,
154               hw_device_t** device) {
155    if (strcmp(id, HWVULKAN_DEVICE_0) == 0) {
156        *device = &nulldrv_device.common;
157        return 0;
158    }
159    return -ENOENT;
160}
161
// Recover the owning instance from a physical-device pointer. Works because
// VkInstance_T embeds its single physical device by value, so subtracting
// the member offset yields the enclosing instance.
VkInstance_T* GetInstanceFromPhysicalDevice(
    VkPhysicalDevice_T* physical_device) {
    return reinterpret_cast<VkInstance_T*>(
        reinterpret_cast<uintptr_t>(physical_device) -
        offsetof(VkInstance_T, physical_device));
}
168
169uint64_t AllocHandle(uint64_t type, uint64_t* next_handle) {
170    const uint64_t kHandleMask = (UINT64_C(1) << 56) - 1;
171    ALOGE_IF(*next_handle == kHandleMask,
172             "non-dispatchable handles of type=%" PRIu64
173             " are about to overflow",
174             type);
175    return (UINT64_C(1) << 63) | ((type & 0x7) << 56) |
176           ((*next_handle)++ & kHandleMask);
177}
178
// Instance-scoped handle allocation. All instance-level handle types share
// the single next_callback_handle counter (currently only used for
// VkDebugReportCallbackEXT).
template <class Handle>
Handle AllocHandle(VkInstance instance, HandleType::Enum type) {
    return reinterpret_cast<Handle>(
        AllocHandle(type, &instance->next_callback_handle));
}
184
// Device-scoped handle allocation, drawing from the device's per-type
// counter so each handle type counts independently.
template <class Handle>
Handle AllocHandle(VkDevice device, HandleType::Enum type) {
    return reinterpret_cast<Handle>(
        AllocHandle(type, &device->next_handle[type]));
}
190
191}  // namespace
192
193namespace null_driver {
194
// Generates the two converters between a pointer-backed object T and its
// non-dispatchable Vk##T handle. For these objects the handle is simply the
// pointer value (high bit clear, distinguishing them from counter-based
// handles produced by AllocHandle).
#define DEFINE_OBJECT_HANDLE_CONVERSION(T)              \
    T* Get##T##FromHandle(Vk##T h);                     \
    T* Get##T##FromHandle(Vk##T h) {                    \
        return reinterpret_cast<T*>(uintptr_t(h));      \
    }                                                   \
    Vk##T GetHandleTo##T(const T* obj);                 \
    Vk##T GetHandleTo##T(const T* obj) {                \
        return Vk##T(reinterpret_cast<uintptr_t>(obj)); \
    }
204
205// -----------------------------------------------------------------------------
206// Global
207
208VKAPI_ATTR
209VkResult EnumerateInstanceExtensionProperties(
210    const char* layer_name,
211    uint32_t* count,
212    VkExtensionProperties* properties) {
213    if (layer_name) {
214        ALOGW(
215            "Driver vkEnumerateInstanceExtensionProperties shouldn't be called "
216            "with a layer name ('%s')",
217            layer_name);
218        *count = 0;
219        return VK_SUCCESS;
220    }
221
222    const VkExtensionProperties kExtensions[] = {
223        {VK_EXT_DEBUG_REPORT_EXTENSION_NAME, VK_EXT_DEBUG_REPORT_SPEC_VERSION}};
224    const uint32_t kExtensionsCount =
225        sizeof(kExtensions) / sizeof(kExtensions[0]);
226
227    if (!properties || *count > kExtensionsCount)
228        *count = kExtensionsCount;
229    if (properties)
230        std::copy(kExtensions, kExtensions + *count, properties);
231    return *count < kExtensionsCount ? VK_INCOMPLETE : VK_SUCCESS;
232}
233
234VKAPI_ATTR
235VkResult CreateInstance(const VkInstanceCreateInfo* create_info,
236                        const VkAllocationCallbacks* allocator,
237                        VkInstance* out_instance) {
238    // Assume the loader provided alloc callbacks even if the app didn't.
239    ALOG_ASSERT(
240        allocator,
241        "Missing alloc callbacks, loader or app should have provided them");
242
243    VkInstance_T* instance =
244        static_cast<VkInstance_T*>(allocator->pfnAllocation(
245            allocator->pUserData, sizeof(VkInstance_T), alignof(VkInstance_T),
246            VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE));
247    if (!instance)
248        return VK_ERROR_OUT_OF_HOST_MEMORY;
249
250    instance->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
251    instance->allocator = *allocator;
252    instance->physical_device.dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
253    instance->next_callback_handle = 0;
254    instance->debug_report_enabled = false;
255
256    for (uint32_t i = 0; i < create_info->enabledExtensionCount; i++) {
257        if (strcmp(create_info->ppEnabledExtensionNames[i],
258                   VK_EXT_DEBUG_REPORT_EXTENSION_NAME) == 0) {
259            ALOGV("Enabling " VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
260            instance->debug_report_enabled = true;
261        }
262    }
263
264    *out_instance = instance;
265    return VK_SUCCESS;
266}
267
268VKAPI_ATTR
269PFN_vkVoidFunction GetInstanceProcAddr(VkInstance instance, const char* name) {
270    return instance ? GetInstanceProcAddr(name) : GetGlobalProcAddr(name);
271}
272
VKAPI_ATTR
PFN_vkVoidFunction GetDeviceProcAddr(VkDevice, const char* name) {
    // No per-device specialization: device commands resolve through the
    // same table as instance commands.
    return GetInstanceProcAddr(name);
}
277
278// -----------------------------------------------------------------------------
279// Instance
280
void DestroyInstance(VkInstance instance,
                     const VkAllocationCallbacks* /*allocator*/) {
    // Frees with the callbacks captured at creation; the passed-in allocator
    // is ignored (the spec requires the two to be compatible).
    instance->allocator.pfnFree(instance->allocator.pUserData, instance);
}
285
286// -----------------------------------------------------------------------------
287// PhysicalDevice
288
289VkResult EnumeratePhysicalDevices(VkInstance instance,
290                                  uint32_t* physical_device_count,
291                                  VkPhysicalDevice* physical_devices) {
292    if (physical_devices && *physical_device_count >= 1)
293        physical_devices[0] = &instance->physical_device;
294    *physical_device_count = 1;
295    return VK_SUCCESS;
296}
297
VkResult EnumerateDeviceLayerProperties(VkPhysicalDevice /*gpu*/,
                                        uint32_t* count,
                                        VkLayerProperties* /*properties*/) {
    // Device layers are deprecated; the loader shouldn't forward this call.
    ALOGW("Driver vkEnumerateDeviceLayerProperties shouldn't be called");
    *count = 0;
    return VK_SUCCESS;
}
305
306VkResult EnumerateDeviceExtensionProperties(VkPhysicalDevice /*gpu*/,
307                                            const char* layer_name,
308                                            uint32_t* count,
309                                            VkExtensionProperties* properties) {
310    if (layer_name) {
311        ALOGW(
312            "Driver vkEnumerateDeviceExtensionProperties shouldn't be called "
313            "with a layer name ('%s')",
314            layer_name);
315        *count = 0;
316        return VK_SUCCESS;
317    }
318
319    const VkExtensionProperties kExtensions[] = {
320        {VK_ANDROID_NATIVE_BUFFER_EXTENSION_NAME,
321         VK_ANDROID_NATIVE_BUFFER_SPEC_VERSION}};
322    const uint32_t kExtensionsCount =
323        sizeof(kExtensions) / sizeof(kExtensions[0]);
324
325    if (!properties || *count > kExtensionsCount)
326        *count = kExtensionsCount;
327    if (properties)
328        std::copy(kExtensions, kExtensions + *count, properties);
329    return *count < kExtensionsCount ? VK_INCOMPLETE : VK_SUCCESS;
330}
331
void GetPhysicalDeviceProperties(VkPhysicalDevice,
                                 VkPhysicalDeviceProperties* properties) {
    // Only the identification fields are filled in; limits/sparseProperties
    // are left untouched. NOTE(review): callers see whatever garbage was in
    // those fields — confirm nothing relies on them.
    properties->apiVersion = VK_API_VERSION;
    properties->driverVersion = VK_MAKE_VERSION(0, 0, 1);
    properties->vendorID = 0;
    properties->deviceID = 0;
    properties->deviceType = VK_PHYSICAL_DEVICE_TYPE_OTHER;
    strcpy(properties->deviceName, "Android Vulkan Null Driver");
    memset(properties->pipelineCacheUUID, 0,
           sizeof(properties->pipelineCacheUUID));
}
343
344void GetPhysicalDeviceQueueFamilyProperties(
345    VkPhysicalDevice,
346    uint32_t* count,
347    VkQueueFamilyProperties* properties) {
348    if (!properties || *count > 1)
349        *count = 1;
350    if (properties && *count == 1) {
351        properties->queueFlags = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT |
352                                 VK_QUEUE_TRANSFER_BIT;
353        properties->queueCount = 1;
354        properties->timestampValidBits = 64;
355        properties->minImageTransferGranularity = VkExtent3D{1, 1, 1};
356    }
357}
358
void GetPhysicalDeviceMemoryProperties(
    VkPhysicalDevice,
    VkPhysicalDeviceMemoryProperties* properties) {
    // A single memory type in a single heap. Since "device" memory is
    // really host memory, it is simultaneously device-local, host-visible,
    // coherent, and cached.
    properties->memoryTypeCount = 1;
    properties->memoryTypes[0].propertyFlags =
        VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT |
        VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
        VK_MEMORY_PROPERTY_HOST_COHERENT_BIT |
        VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
    properties->memoryTypes[0].heapIndex = 0;
    properties->memoryHeapCount = 1;
    properties->memoryHeaps[0].size = kMaxDeviceMemory;
    properties->memoryHeaps[0].flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT;
}
373
374// -----------------------------------------------------------------------------
375// Device
376
377VkResult CreateDevice(VkPhysicalDevice physical_device,
378                      const VkDeviceCreateInfo*,
379                      const VkAllocationCallbacks* allocator,
380                      VkDevice* out_device) {
381    VkInstance_T* instance = GetInstanceFromPhysicalDevice(physical_device);
382    if (!allocator)
383        allocator = &instance->allocator;
384    VkDevice_T* device = static_cast<VkDevice_T*>(allocator->pfnAllocation(
385        allocator->pUserData, sizeof(VkDevice_T), alignof(VkDevice_T),
386        VK_SYSTEM_ALLOCATION_SCOPE_DEVICE));
387    if (!device)
388        return VK_ERROR_OUT_OF_HOST_MEMORY;
389
390    device->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
391    device->allocator = *allocator;
392    device->instance = instance;
393    device->queue.dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
394    std::fill(device->next_handle.begin(), device->next_handle.end(),
395              UINT64_C(0));
396
397    *out_device = device;
398    return VK_SUCCESS;
399}
400
401void DestroyDevice(VkDevice device,
402                   const VkAllocationCallbacks* /*allocator*/) {
403    if (!device)
404        return;
405    device->allocator.pfnFree(device->allocator.pUserData, device);
406}
407
void GetDeviceQueue(VkDevice device, uint32_t, uint32_t, VkQueue* queue) {
    // Family and queue index are ignored; the device embeds its only queue.
    *queue = &device->queue;
}
411
412// -----------------------------------------------------------------------------
413// CommandPool
414
// Pool state is just the allocator used to create it, so command buffers
// can later be allocated/freed through the same callbacks.
struct CommandPool {
    typedef VkCommandPool HandleType;
    VkAllocationCallbacks allocator;
};
DEFINE_OBJECT_HANDLE_CONVERSION(CommandPool)
420
421VkResult CreateCommandPool(VkDevice device,
422                           const VkCommandPoolCreateInfo* /*create_info*/,
423                           const VkAllocationCallbacks* allocator,
424                           VkCommandPool* cmd_pool) {
425    if (!allocator)
426        allocator = &device->allocator;
427    CommandPool* pool = static_cast<CommandPool*>(allocator->pfnAllocation(
428        allocator->pUserData, sizeof(CommandPool), alignof(CommandPool),
429        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
430    if (!pool)
431        return VK_ERROR_OUT_OF_HOST_MEMORY;
432    pool->allocator = *allocator;
433    *cmd_pool = GetHandleToCommandPool(pool);
434    return VK_SUCCESS;
435}
436
437void DestroyCommandPool(VkDevice /*device*/,
438                        VkCommandPool cmd_pool,
439                        const VkAllocationCallbacks* /*allocator*/) {
440    CommandPool* pool = GetCommandPoolFromHandle(cmd_pool);
441    pool->allocator.pfnFree(pool->allocator.pUserData, pool);
442}
443
444// -----------------------------------------------------------------------------
445// CmdBuffer
446
VkResult AllocateCommandBuffers(VkDevice /*device*/,
                                const VkCommandBufferAllocateInfo* alloc_info,
                                VkCommandBuffer* cmdbufs) {
    VkResult result = VK_SUCCESS;
    CommandPool& pool = *GetCommandPoolFromHandle(alloc_info->commandPool);
    // Pre-null the output array so the failure path below can tell how far
    // allocation got.
    std::fill(cmdbufs, cmdbufs + alloc_info->commandBufferCount, nullptr);
    for (uint32_t i = 0; i < alloc_info->commandBufferCount; i++) {
        cmdbufs[i] =
            static_cast<VkCommandBuffer_T*>(pool.allocator.pfnAllocation(
                pool.allocator.pUserData, sizeof(VkCommandBuffer_T),
                alignof(VkCommandBuffer_T), VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
        if (!cmdbufs[i]) {
            result = VK_ERROR_OUT_OF_HOST_MEMORY;
            break;
        }
        cmdbufs[i]->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    }
    // All-or-nothing: on failure, free everything allocated so far (the
    // first null entry marks where allocation stopped).
    if (result != VK_SUCCESS) {
        for (uint32_t i = 0; i < alloc_info->commandBufferCount; i++) {
            if (!cmdbufs[i])
                break;
            pool.allocator.pfnFree(pool.allocator.pUserData, cmdbufs[i]);
        }
    }
    return result;
}
473
474void FreeCommandBuffers(VkDevice /*device*/,
475                        VkCommandPool cmd_pool,
476                        uint32_t count,
477                        const VkCommandBuffer* cmdbufs) {
478    CommandPool& pool = *GetCommandPoolFromHandle(cmd_pool);
479    for (uint32_t i = 0; i < count; i++)
480        pool.allocator.pfnFree(pool.allocator.pUserData, cmdbufs[i]);
481}
482
483// -----------------------------------------------------------------------------
484// DeviceMemory
485
struct DeviceMemory {
    typedef VkDeviceMemory HandleType;
    VkDeviceSize size;  // total allocation size, including this header
    // Flexible-array-style payload; 16-byte aligned so mapped pointers suit
    // fast SIMD copies. (Zero-length arrays are a compiler extension, not
    // standard C++ — relies on clang/gcc.)
    alignas(16) uint8_t data[0];
};
DEFINE_OBJECT_HANDLE_CONVERSION(DeviceMemory)
492
VkResult AllocateMemory(VkDevice device,
                        const VkMemoryAllocateInfo* alloc_info,
                        const VkAllocationCallbacks* allocator,
                        VkDeviceMemory* mem_handle) {
    // Guard the size_t addition below: allocationSize is 64-bit even on
    // 32-bit builds, where SIZE_MAX is smaller.
    if (SIZE_MAX - sizeof(DeviceMemory) <= alloc_info->allocationSize)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    if (!allocator)
        allocator = &device->allocator;

    // "Device" memory is host memory with a DeviceMemory header in front.
    size_t size = sizeof(DeviceMemory) + size_t(alloc_info->allocationSize);
    DeviceMemory* mem = static_cast<DeviceMemory*>(allocator->pfnAllocation(
        allocator->pUserData, size, alignof(DeviceMemory),
        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
    if (!mem)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    // NOTE(review): stores the header-inclusive size rather than
    // allocationSize — confirm intended if this field is ever consumed.
    mem->size = size;
    *mem_handle = GetHandleToDeviceMemory(mem);
    return VK_SUCCESS;
}
512
513void FreeMemory(VkDevice device,
514                VkDeviceMemory mem_handle,
515                const VkAllocationCallbacks* allocator) {
516    if (!allocator)
517        allocator = &device->allocator;
518    DeviceMemory* mem = GetDeviceMemoryFromHandle(mem_handle);
519    allocator->pfnFree(allocator->pUserData, mem);
520}
521
VkResult MapMemory(VkDevice,
                   VkDeviceMemory mem_handle,
                   VkDeviceSize offset,
                   VkDeviceSize,
                   VkMemoryMapFlags,
                   void** out_ptr) {
    // Memory is always host memory, so mapping is just pointer arithmetic
    // into the payload; the range length and flags are irrelevant here.
    DeviceMemory* mem = GetDeviceMemoryFromHandle(mem_handle);
    *out_ptr = &mem->data[0] + offset;
    return VK_SUCCESS;
}
532
533// -----------------------------------------------------------------------------
534// Buffer
535
// A buffer only needs to remember its requested size, for
// GetBufferMemoryRequirements; no storage is ever backed.
struct Buffer {
    typedef VkBuffer HandleType;
    VkDeviceSize size;
};
DEFINE_OBJECT_HANDLE_CONVERSION(Buffer)
541
542VkResult CreateBuffer(VkDevice device,
543                      const VkBufferCreateInfo* create_info,
544                      const VkAllocationCallbacks* allocator,
545                      VkBuffer* buffer_handle) {
546    ALOGW_IF(create_info->size > kMaxDeviceMemory,
547             "CreateBuffer: requested size 0x%" PRIx64
548             " exceeds max device memory size 0x%" PRIx64,
549             create_info->size, kMaxDeviceMemory);
550    if (!allocator)
551        allocator = &device->allocator;
552    Buffer* buffer = static_cast<Buffer*>(allocator->pfnAllocation(
553        allocator->pUserData, sizeof(Buffer), alignof(Buffer),
554        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
555    if (!buffer)
556        return VK_ERROR_OUT_OF_HOST_MEMORY;
557    buffer->size = create_info->size;
558    *buffer_handle = GetHandleToBuffer(buffer);
559    return VK_SUCCESS;
560}
561
void GetBufferMemoryRequirements(VkDevice,
                                 VkBuffer buffer_handle,
                                 VkMemoryRequirements* requirements) {
    Buffer* buffer = GetBufferFromHandle(buffer_handle);
    requirements->size = buffer->size;
    requirements->alignment = 16;  // allow fast Neon/SSE memcpy
    requirements->memoryTypeBits = 0x1;  // only the one memory type exists
}
570
571void DestroyBuffer(VkDevice device,
572                   VkBuffer buffer_handle,
573                   const VkAllocationCallbacks* allocator) {
574    if (!allocator)
575        allocator = &device->allocator;
576    Buffer* buffer = GetBufferFromHandle(buffer_handle);
577    allocator->pfnFree(allocator->pUserData, buffer);
578}
579
580// -----------------------------------------------------------------------------
581// Image
582
// Like Buffer, an image only records the byte size computed at creation,
// for GetImageMemoryRequirements.
struct Image {
    typedef VkImage HandleType;
    VkDeviceSize size;
};
DEFINE_OBJECT_HANDLE_CONVERSION(Image)
588
589VkResult CreateImage(VkDevice device,
590                     const VkImageCreateInfo* create_info,
591                     const VkAllocationCallbacks* allocator,
592                     VkImage* image_handle) {
593    if (create_info->imageType != VK_IMAGE_TYPE_2D ||
594        create_info->format != VK_FORMAT_R8G8B8A8_UNORM ||
595        create_info->mipLevels != 1) {
596        ALOGE("CreateImage: not yet implemented: type=%d format=%d mips=%u",
597              create_info->imageType, create_info->format,
598              create_info->mipLevels);
599        return VK_ERROR_OUT_OF_HOST_MEMORY;
600    }
601
602    VkDeviceSize size =
603        VkDeviceSize(create_info->extent.width * create_info->extent.height) *
604        create_info->arrayLayers * create_info->samples * 4u;
605    ALOGW_IF(size > kMaxDeviceMemory,
606             "CreateImage: image size 0x%" PRIx64
607             " exceeds max device memory size 0x%" PRIx64,
608             size, kMaxDeviceMemory);
609
610    if (!allocator)
611        allocator = &device->allocator;
612    Image* image = static_cast<Image*>(allocator->pfnAllocation(
613        allocator->pUserData, sizeof(Image), alignof(Image),
614        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
615    if (!image)
616        return VK_ERROR_OUT_OF_HOST_MEMORY;
617    image->size = size;
618    *image_handle = GetHandleToImage(image);
619    return VK_SUCCESS;
620}
621
void GetImageMemoryRequirements(VkDevice,
                                VkImage image_handle,
                                VkMemoryRequirements* requirements) {
    Image* image = GetImageFromHandle(image_handle);
    requirements->size = image->size;
    requirements->alignment = 16;  // allow fast Neon/SSE memcpy
    requirements->memoryTypeBits = 0x1;  // only the one memory type exists
}
630
631void DestroyImage(VkDevice device,
632                  VkImage image_handle,
633                  const VkAllocationCallbacks* allocator) {
634    if (!allocator)
635        allocator = &device->allocator;
636    Image* image = GetImageFromHandle(image_handle);
637    allocator->pfnFree(allocator->pUserData, image);
638}
639
// VK_ANDROID_native_buffer: report gralloc usage bits for swapchain images.
VkResult GetSwapchainGrallocUsageANDROID(VkDevice,
                                         VkFormat,
                                         VkImageUsageFlags,
                                         int* grallocUsage) {
    // The null driver never reads or writes the gralloc buffer
    *grallocUsage = 0;
    return VK_SUCCESS;
}
648
649VkResult AcquireImageANDROID(VkDevice,
650                             VkImage,
651                             int fence,
652                             VkSemaphore,
653                             VkFence) {
654    close(fence);
655    return VK_SUCCESS;
656}
657
// VK_ANDROID_native_buffer: produce a release fence for the presented
// image. -1 signifies no fence is needed; the image can be reused at once.
VkResult QueueSignalReleaseImageANDROID(VkQueue,
                                        uint32_t,
                                        const VkSemaphore*,
                                        VkImage,
                                        int* fence) {
    *fence = -1;
    return VK_SUCCESS;
}
666
667// -----------------------------------------------------------------------------
668// No-op types
669
// The objects below carry no driver-side state, so "creating" one just
// hands back a fresh tagged-counter handle from AllocHandle; the allocation
// callbacks are intentionally unused and nothing happens at destroy time
// (handles are never reclaimed — see the handle-encoding comment above).
VkResult CreateBufferView(VkDevice device,
                          const VkBufferViewCreateInfo*,
                          const VkAllocationCallbacks* /*allocator*/,
                          VkBufferView* view) {
    *view = AllocHandle<VkBufferView>(device, HandleType::kBufferView);
    return VK_SUCCESS;
}

VkResult CreateDescriptorPool(VkDevice device,
                              const VkDescriptorPoolCreateInfo*,
                              const VkAllocationCallbacks* /*allocator*/,
                              VkDescriptorPool* pool) {
    *pool = AllocHandle<VkDescriptorPool>(device, HandleType::kDescriptorPool);
    return VK_SUCCESS;
}

VkResult AllocateDescriptorSets(VkDevice device,
                                const VkDescriptorSetAllocateInfo* alloc_info,
                                VkDescriptorSet* descriptor_sets) {
    for (uint32_t i = 0; i < alloc_info->descriptorSetCount; i++)
        descriptor_sets[i] =
            AllocHandle<VkDescriptorSet>(device, HandleType::kDescriptorSet);
    return VK_SUCCESS;
}

VkResult CreateDescriptorSetLayout(VkDevice device,
                                   const VkDescriptorSetLayoutCreateInfo*,
                                   const VkAllocationCallbacks* /*allocator*/,
                                   VkDescriptorSetLayout* layout) {
    *layout = AllocHandle<VkDescriptorSetLayout>(
        device, HandleType::kDescriptorSetLayout);
    return VK_SUCCESS;
}

VkResult CreateEvent(VkDevice device,
                     const VkEventCreateInfo*,
                     const VkAllocationCallbacks* /*allocator*/,
                     VkEvent* event) {
    *event = AllocHandle<VkEvent>(device, HandleType::kEvent);
    return VK_SUCCESS;
}

VkResult CreateFence(VkDevice device,
                     const VkFenceCreateInfo*,
                     const VkAllocationCallbacks* /*allocator*/,
                     VkFence* fence) {
    *fence = AllocHandle<VkFence>(device, HandleType::kFence);
    return VK_SUCCESS;
}

VkResult CreateFramebuffer(VkDevice device,
                           const VkFramebufferCreateInfo*,
                           const VkAllocationCallbacks* /*allocator*/,
                           VkFramebuffer* framebuffer) {
    *framebuffer = AllocHandle<VkFramebuffer>(device, HandleType::kFramebuffer);
    return VK_SUCCESS;
}

VkResult CreateImageView(VkDevice device,
                         const VkImageViewCreateInfo*,
                         const VkAllocationCallbacks* /*allocator*/,
                         VkImageView* view) {
    *view = AllocHandle<VkImageView>(device, HandleType::kImageView);
    return VK_SUCCESS;
}

VkResult CreateGraphicsPipelines(VkDevice device,
                                 VkPipelineCache,
                                 uint32_t count,
                                 const VkGraphicsPipelineCreateInfo*,
                                 const VkAllocationCallbacks* /*allocator*/,
                                 VkPipeline* pipelines) {
    for (uint32_t i = 0; i < count; i++)
        pipelines[i] = AllocHandle<VkPipeline>(device, HandleType::kPipeline);
    return VK_SUCCESS;
}

VkResult CreateComputePipelines(VkDevice device,
                                VkPipelineCache,
                                uint32_t count,
                                const VkComputePipelineCreateInfo*,
                                const VkAllocationCallbacks* /*allocator*/,
                                VkPipeline* pipelines) {
    for (uint32_t i = 0; i < count; i++)
        pipelines[i] = AllocHandle<VkPipeline>(device, HandleType::kPipeline);
    return VK_SUCCESS;
}

VkResult CreatePipelineCache(VkDevice device,
                             const VkPipelineCacheCreateInfo*,
                             const VkAllocationCallbacks* /*allocator*/,
                             VkPipelineCache* cache) {
    *cache = AllocHandle<VkPipelineCache>(device, HandleType::kPipelineCache);
    return VK_SUCCESS;
}

VkResult CreatePipelineLayout(VkDevice device,
                              const VkPipelineLayoutCreateInfo*,
                              const VkAllocationCallbacks* /*allocator*/,
                              VkPipelineLayout* layout) {
    *layout =
        AllocHandle<VkPipelineLayout>(device, HandleType::kPipelineLayout);
    return VK_SUCCESS;
}

VkResult CreateQueryPool(VkDevice device,
                         const VkQueryPoolCreateInfo*,
                         const VkAllocationCallbacks* /*allocator*/,
                         VkQueryPool* pool) {
    *pool = AllocHandle<VkQueryPool>(device, HandleType::kQueryPool);
    return VK_SUCCESS;
}

VkResult CreateRenderPass(VkDevice device,
                          const VkRenderPassCreateInfo*,
                          const VkAllocationCallbacks* /*allocator*/,
                          VkRenderPass* renderpass) {
    *renderpass = AllocHandle<VkRenderPass>(device, HandleType::kRenderPass);
    return VK_SUCCESS;
}

VkResult CreateSampler(VkDevice device,
                       const VkSamplerCreateInfo*,
                       const VkAllocationCallbacks* /*allocator*/,
                       VkSampler* sampler) {
    *sampler = AllocHandle<VkSampler>(device, HandleType::kSampler);
    return VK_SUCCESS;
}

VkResult CreateSemaphore(VkDevice device,
                         const VkSemaphoreCreateInfo*,
                         const VkAllocationCallbacks* /*allocator*/,
                         VkSemaphore* semaphore) {
    *semaphore = AllocHandle<VkSemaphore>(device, HandleType::kSemaphore);
    return VK_SUCCESS;
}

VkResult CreateShaderModule(VkDevice device,
                            const VkShaderModuleCreateInfo*,
                            const VkAllocationCallbacks* /*allocator*/,
                            VkShaderModule* module) {
    *module = AllocHandle<VkShaderModule>(device, HandleType::kShaderModule);
    return VK_SUCCESS;
}

// Instance-scoped: draws from the instance's single handle counter.
VkResult CreateDebugReportCallbackEXT(VkInstance instance,
                                      const VkDebugReportCallbackCreateInfoEXT*,
                                      const VkAllocationCallbacks*,
                                      VkDebugReportCallbackEXT* callback) {
    *callback = AllocHandle<VkDebugReportCallbackEXT>(
        instance, HandleType::kDebugReportCallbackEXT);
    return VK_SUCCESS;
}
823
824// -----------------------------------------------------------------------------
825// No-op entrypoints
826
827// clang-format off
828#pragma clang diagnostic push
829#pragma clang diagnostic ignored "-Wunused-parameter"
830
// Physical-device capability queries: not yet implemented.  Each logs a
// TODO so the gap is visible in verbose logs; the output structures are
// left unwritten.
void GetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

void GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult GetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    // NOTE(review): reports success while *pImageFormatProperties stays
    // unwritten — confirm callers tolerate this until it is implemented.
    return VK_SUCCESS;
}
843
844VkResult EnumerateInstanceLayerProperties(uint32_t* pCount, VkLayerProperties* pProperties) {
845    ALOGV("TODO: vk%s", __FUNCTION__);
846    return VK_SUCCESS;
847}
848
// Queue and memory entrypoints.  The null driver executes no GPU work, so
// submissions complete immediately and every wait is trivially idle.  The
// silent functions are deliberate no-ops; the ones that log a TODO are
// unimplemented stubs whose outputs (if any) are left unwritten.
VkResult QueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmitInfo, VkFence fence) {
    // Intentionally silent: there is nothing to execute or signal.
    return VK_SUCCESS;
}

VkResult QueueWaitIdle(VkQueue queue) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult DeviceWaitIdle(VkDevice device) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

// Unmapping needs no bookkeeping here; intentionally empty.
void UnmapMemory(VkDevice device, VkDeviceMemory mem) {
}

VkResult FlushMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult InvalidateMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void GetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

// Binding records nothing: buffers/images have no backing state to attach.
VkResult BindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memOffset) {
    return VK_SUCCESS;
}

VkResult BindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem, VkDeviceSize memOffset) {
    return VK_SUCCESS;
}

void GetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t* pNumRequirements, VkSparseImageMemoryRequirements* pSparseMemoryRequirements) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

void GetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pNumProperties, VkSparseImageFormatProperties* pProperties) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult QueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}
900
// Object destruction and state queries.  Per the handle scheme described at
// the top of this file, handles are never reclaimed on vkDestroy*, so the
// Destroy* entrypoints below are deliberately empty.  Since no work is ever
// pending, fence/event waits and resets succeed immediately; the Get*/Reset*
// functions that log a TODO are unimplemented stubs.
void DestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* allocator) {
}

VkResult ResetFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences) {
    return VK_SUCCESS;
}

VkResult GetFenceStatus(VkDevice device, VkFence fence) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult WaitForFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout) {
    return VK_SUCCESS;
}

void DestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* allocator) {
}

void DestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks* allocator) {
}

VkResult GetEventStatus(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult SetEvent(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult ResetEvent(VkDevice device, VkEvent event) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* allocator) {
}

VkResult GetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    // NOTE(review): reports success without writing pData — confirm callers
    // tolerate an unwritten result buffer until queries are implemented.
    return VK_SUCCESS;
}

void DestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* allocator) {
}

void GetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

void DestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* allocator) {
}

void DestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* allocator) {
}

void DestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* allocator) {
}

VkResult GetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult MergePipelineCaches(VkDevice device, VkPipelineCache destCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* allocator) {
}

void DestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* allocator) {
}

void DestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* allocator) {
}

void DestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* allocator) {
}

void DestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* allocator) {
}

VkResult ResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void UpdateDescriptorSets(VkDevice device, uint32_t writeCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t copyCount, const VkCopyDescriptorSet* pDescriptorCopies) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult FreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count, const VkDescriptorSet* pDescriptorSets) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* allocator) {
}

void DestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* allocator) {
}

void GetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}
1010
// Command-buffer lifecycle.  Recording state is not tracked, so
// begin/end are silent, trivially-successful no-ops; the Reset* entrypoints
// log a TODO to mark them as unimplemented stubs.
VkResult ResetCommandPool(VkDevice device, VkCommandPool cmdPool, VkCommandPoolResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult BeginCommandBuffer(VkCommandBuffer cmdBuffer, const VkCommandBufferBeginInfo* pBeginInfo) {
    return VK_SUCCESS;
}

VkResult EndCommandBuffer(VkCommandBuffer cmdBuffer) {
    return VK_SUCCESS;
}

VkResult ResetCommandBuffer(VkCommandBuffer cmdBuffer, VkCommandBufferResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}
1028
// Command recording.  Command buffers record nothing in the null driver, so
// every vkCmd* entrypoint below is an intentional, silent no-op: parameters
// are accepted and discarded.
void CmdBindPipeline(VkCommandBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) {
}

void CmdSetViewport(VkCommandBuffer cmdBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports) {
}

void CmdSetScissor(VkCommandBuffer cmdBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors) {
}

void CmdSetLineWidth(VkCommandBuffer cmdBuffer, float lineWidth) {
}

void CmdSetDepthBias(VkCommandBuffer cmdBuffer, float depthBias, float depthBiasClamp, float slopeScaledDepthBias) {
}

void CmdSetBlendConstants(VkCommandBuffer cmdBuffer, const float blendConst[4]) {
}

void CmdSetDepthBounds(VkCommandBuffer cmdBuffer, float minDepthBounds, float maxDepthBounds) {
}

void CmdSetStencilCompareMask(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilCompareMask) {
}

void CmdSetStencilWriteMask(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilWriteMask) {
}

void CmdSetStencilReference(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilReference) {
}

void CmdBindDescriptorSets(VkCommandBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) {
}

void CmdBindIndexBuffer(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType) {
}

void CmdBindVertexBuffers(VkCommandBuffer cmdBuffer, uint32_t startBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets) {
}

void CmdDraw(VkCommandBuffer cmdBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) {
}

void CmdDrawIndexed(VkCommandBuffer cmdBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
}

void CmdDrawIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
}

void CmdDrawIndexedIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
}

void CmdDispatch(VkCommandBuffer cmdBuffer, uint32_t x, uint32_t y, uint32_t z) {
}

void CmdDispatchIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset) {
}

void CmdCopyBuffer(VkCommandBuffer cmdBuffer, VkBuffer srcBuffer, VkBuffer destBuffer, uint32_t regionCount, const VkBufferCopy* pRegions) {
}

void CmdCopyImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageCopy* pRegions) {
}

void CmdBlitImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter) {
}

void CmdCopyBufferToImage(VkCommandBuffer cmdBuffer, VkBuffer srcBuffer, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
}

void CmdCopyImageToBuffer(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer destBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
}

void CmdUpdateBuffer(VkCommandBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize dataSize, const uint32_t* pData) {
}

void CmdFillBuffer(VkCommandBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize fillSize, uint32_t data) {
}

void CmdClearColorImage(VkCommandBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
}

void CmdClearDepthStencilImage(VkCommandBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
}

void CmdClearAttachments(VkCommandBuffer cmdBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects) {
}

void CmdResolveImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageResolve* pRegions) {
}

void CmdSetEvent(VkCommandBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
}

void CmdResetEvent(VkCommandBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
}

void CmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {
}

void CmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {
}

void CmdBeginQuery(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot, VkQueryControlFlags flags) {
}

void CmdEndQuery(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot) {
}

void CmdResetQueryPool(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount) {
}

void CmdWriteTimestamp(VkCommandBuffer cmdBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t slot) {
}

void CmdCopyQueryPoolResults(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize destStride, VkQueryResultFlags flags) {
}

void CmdPushConstants(VkCommandBuffer cmdBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t start, uint32_t length, const void* values) {
}

void CmdBeginRenderPass(VkCommandBuffer cmdBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents) {
}

void CmdNextSubpass(VkCommandBuffer cmdBuffer, VkSubpassContents contents) {
}

void CmdEndRenderPass(VkCommandBuffer cmdBuffer) {
}

void CmdExecuteCommands(VkCommandBuffer cmdBuffer, uint32_t cmdBuffersCount, const VkCommandBuffer* pCmdBuffers) {
}

// Debug-report callback handles are never reclaimed (see the handle scheme
// at the top of this file), so destruction is a no-op.
void DestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator) {
}

// Messages injected by layers/apps are dropped: no callbacks are dispatched.
void DebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage) {
}
1166
1167#pragma clang diagnostic pop
1168// clang-format on
1169
1170}  // namespace null_driver
1171